/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/*
 * Pick the unaligned partial-store instructions for the native long size:
 * swl/swr on 32-bit, sdl/sdr on 64-bit.
 */
#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

/*
 * microMIPS uses the paired-store instruction (LONG_SP), which writes two
 * longs per store, so the effective store granule doubles and the fill
 * value/pointer live in different scratch registers.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

/*
 * EX: emit a store with an exception-table entry so that a fault on a
 * user address branches to the supplied fixup label.  In EVA mode the
 * instruction is rewritten to its user-access ("e") variant.
 */
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

	/*
	 * f_fill64: store 64 bytes at \dst+\offset using \val, with every
	 * store covered by the \fixup exception handler.  The number of
	 * stores depends on the store granule (STORSIZE).
	 */
	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we execute this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif

	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	/*
	 * Without swl/swr the head is aligned one byte at a time.
	 * STORE_BYTE stores byte N and bails out to 0f once t0 hits zero.
	 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK		/* round dest up to the next */
	xori		a0, STORMASK		/* store granule */
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1			/* jump into f_fill64, skipping */
						/* the stores we don't need */

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2		/* tail done bytewise as well */
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero		/* all bytes written */
	jr		ra

.Lsmall_memset\@:
	/* Region shorter than one store granule: plain byte loop. */
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

	/*
	 * Fixup handlers: on a faulting store, compute the number of bytes
	 * NOT set and return it in a2, as the uaccess callers expect.
	 */
#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
TOMOYO® is a registered trademark of NTT DATA CORPORATION.