/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
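/*
 * How the pieces above fit together: EX() emits one store -- the plain
 * instruction for LEGACY_MODE, or the EVA user-mode variant (insn##e)
 * otherwise -- and records a __ex_table entry pairing the store's
 * address with a fixup handler.  If the store faults, the exception
 * code branches to that handler, which leaves the number of bytes
 * still unset in a2.  f_fill64 expands to enough STORSIZE-wide stores
 * to cover 64 bytes per invocation; on microMIPS, LONG_S is redefined
 * to the pair store LONG_SP (2 * LONGSIZE bytes per instruction), so
 * only half as many stores are emitted.
 */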
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we execute this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
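/*
 * The destination is now STORSIZE-aligned.  The ori/xori pair below is
 * equivalent to t1 = a2 & ~0x3f: ori forces the low six bits on, xori
 * clears them again, rounding the byte count down to whole 64-byte
 * blocks.  For example, a2 = 0x8b gives t1 = 0x80 (two full blocks),
 * while t0 = a2 & (0x40 - STORSIZE) keeps the bytes of the partial
 * block that still form whole STORSIZE-sized stores (0x08 here, with
 * STORSIZE == 4).  The partial-block pass then jumps into the middle
 * of an f_fill64 expansion so exactly t0/STORSIZE stores execute.
 */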
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra
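/*
 * Regions shorter than STORSIZE bytes are filled one byte at a time;
 * alignment handling would cost more than it saves at this size.
 */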
.Lsmall_memset\@:
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
	END(__bzero)
#endif
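/*
 * Calling-convention note: memset() returns the original destination
 * pointer in v0.  __bzero takes the buffer in a0, an already-spread
 * fill pattern (zero for its callers) in a1 and the length in a2; if a
 * store faults, the fixup handlers above return the number of bytes
 * left unset in a2, which is the contract expected by callers such as
 * the user-copy helpers (e.g. __clear_user()).
 */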