/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef	__memset
	.set		__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder
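
	/*
	 * Note: the block below first long-aligns the destination.  With
	 * swl/swr (sdl/sdr on 64-bit) available, a single unaligned
	 * LONG_S_L/LONG_S_R store covers the leading partial long; with
	 * CONFIG_CPU_NO_LOAD_STORE_LR the same is done with STORE_BYTE()
	 * byte stores instead.  Either way t0 carries the (negative) byte
	 * count needed to reach a STORSIZE boundary, so a0 can then be
	 * advanced to the boundary and a2 shrunk accordingly.
	 */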

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

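	/*
	 * The jr above enters the f_fill64 expansion below part-way through:
	 * t1 was biased back from label 2 so that only the stores needed for
	 * the t0 bytes left over after the full 64-byte blocks are executed
	 * (a Duff's-device-style entry into the unrolled stores), after which
	 * execution falls through to 2:.
	 */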
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set		__memset, 0
	.hidden		__memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
	END(__bzero)
#endif
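
/*
 * For reference only: a rough C-level sketch of the fill-word setup done by
 * memset above before it falls into the __BUILD_BZERO body (illustration,
 * not used by the build; alignment, the 64-byte block stores and the fault
 * fixups are exactly as coded in the macro):
 *
 *	void *memset(void *s, int c, size_t n)
 *	{
 *		unsigned long fill = c & 0xff;	// a1
 *
 *		fill |= fill << 8;		// spread the byte across ...
 *		fill |= fill << 16;		// ... the whole register
 *	#if LONGSIZE == 8
 *		fill |= fill << 32;
 *	#endif
 *		// ... __bzero-style aligned long/block stores fill the area
 *		return s;			// v0 = original a0
 *	}
 */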