/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.set	noreorder
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 *  mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef	__memset
	.set		__memset, 1
	.hidden __memset /* make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
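	/*
	 * Note: the andi above sits in the delay slot of the bnez, so it
	 * executes on both paths; on the fall-through path t0 holds the
	 * misalignment of a0 within a STORSIZE store unit.
	 */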
#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz	t0, 0f;				\
	PTR_ADDU t0, 1;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial\@	/* no block to fill */
	 andi		t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b
	.set		noreorder

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	 PTR_ADDU	a0, t0			/* dest ptr */

	.set		push
	.set		noreorder
	.set		nomacro
	/* ... but first do longs ... */
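	/*
	 * Duff's-device-style entry: the jr t1 above jumps part-way into
	 * the f_fill64 expansion below, so only the stores covering the
	 * final partial block execute before control reaches label 2.
	 */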
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set		pop
	andi		a2, STORMASK		/* At most one long to go */

	beqz		a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	 PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	 PTR_SUBU	t0, $0, a2
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
1:	jr		ra
	 move		a2, zero

.Lsmall_memset\@:
	beqz		a2, 2f
	 PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)

2:	jr		ra			/* done */
	 move		a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_MIPSR6
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	jr		ra
	 PTR_ADDIU	a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra
	 nop

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	 LONG_SUBU	a2, t0

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	jr		ra
	 LONG_SUBU	a2, t0

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra
	 nop

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	jr		ra
	 PTR_ADDIU	a2, 1

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	beqz		a1, 1f
	 move		v0, a0			/* result */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#else
FEXPORT(__bzero_kernel)
EXPORT_SYMBOL(__bzero_kernel)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
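/*
 * Summary: memset() replicates the fill byte across a full long in a1
 * and falls through into the LEGACY_MODE __bzero body generated above.
 * With CONFIG_EVA, the kernel-address variant is exported as
 * __bzero_kernel, and a second EVA_MODE expansion of __BUILD_BZERO
 * provides __bzero for user addresses via the EVA (*e) store forms.
 */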