/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous
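
/*
 * Roughly: EX() emits the store together with a __ex_table entry
 * pairing the store's address with a fixup label.  If the store
 * faults, the fault handler looks the faulting PC up in __ex_table
 * (search_exception_tables()) and resumes at the fixup, which is
 * expected to leave the number of bytes still unset in a2.
 */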

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

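/*
 * Align the destination first.  With unaligned stores available, one
 * swl/sdl (big-endian) or swr/sdr (little-endian) fills the head up to
 * the next long boundary; without them (CONFIG_CPU_NO_LOAD_STORE_LR,
 * e.g. MIPS R6) the head is filled one byte at a time.  A rough C
 * sketch of the bookkeeping, with dst/count standing in for a0/a2:
 *
 *	size_t mis = (uintptr_t)dst & STORMASK;
 *	if (mis) {
 *		size_t head = STORSIZE - mis;	// t0 = mis - STORSIZE above
 *		dst += head;			// advance to the boundary
 *		count -= head;			// head bytes are now set
 *	}
 */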
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

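/*
 * The ori/xori pair above computes t1 = a2 & ~0x3f (bytes in full
 * 64-byte blocks) and t0 = a2 & (0x40 - STORSIZE) (whole longs in the
 * trailing partial block).  The partial block below is then handled
 * Duff's-device style: t1 is pointed into the middle of the f_fill64
 * expansion before label 2, so the jr executes only as many stores as
 * t0 calls for.  Rough C equivalent of the three-way split:
 *
 *	size_t blocks = count & ~(size_t)0x3f;
 *	size_t longs  = count & (0x40 - STORSIZE);
 *	size_t tail   = count & STORMASK;
 */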
.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) + 1
	 *      a2     =  a2     -  t0               + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
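
/*
 * Fault fixups.  Each handler below leaves the number of bytes still
 * unset in a2 before returning, which is what __bzero's callers (e.g.
 * __clear_user()) report back.  THREAD_BUADDR holds the faulting
 * address saved by the fault handler, so for instance the forward
 * fixup evaluates, roughly, a2 = (t1 + (a2 & 0x3f)) - fault_addr.
 */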

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
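
/*
 * For reference, the fill-word spreading in memset above matches this
 * rough C sketch (the 32-bit build stops after the 16-bit doubling):
 *
 *	unsigned long w = c & 0xff;
 *	w |= w << 8;
 *	w |= w << 16;
 *	#if LONGSIZE == 8
 *	w |= w << 32;
 *	#endif
 */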