/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";		 	\
	PTR_WD	9b, handler;			 	\
	.previous
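
/*
 * A note on EX(): every potentially-faulting store is emitted behind the
 * local label 9:, and PTR_WD records the (store address, fixup handler)
 * pair in the __ex_table section.  If the store faults, the page fault
 * code looks the faulting address up in that table and resumes execution
 * at the fixup label instead of oopsing; each fixup below leaves the
 * number of bytes still unset in a2 for callers such as clear_user().
 */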

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
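
/*
 * Every expansion of f_fill64 stores exactly 64 bytes: the #if blocks
 * above select 16 stores of 4 bytes on 32-bit kernels, 8 stores of
 * 8 bytes on 64-bit kernels, and correspondingly fewer of the wider
 * microMIPS pair stores (STORSIZE doubles there), so the block-fill
 * loop below can always advance the destination by 0x40 per iteration.
 */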

	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we execute this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero		/* done */
	jr		ra
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm
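
/*
 * Worked example for the .Lmemset_partial computed jump, assuming a
 * 32-bit non-microMIPS kernel (STORSIZE == 4): each LONG_S in the
 * trailing f_fill64 is a 4-byte instruction that stores 4 bytes, so
 * jumping t0 bytes before the 2: label executes exactly the t0/4
 * stores needed for the partial block, Duff's-device style.  With
 * a2 = 92 the loop fills one full 64-byte block, t0 = 92 & 0x3c = 28,
 * and the jump lands 7 stores before 2:, writing the last 28 bytes.
 * On 64-bit kernels each instruction still occupies 4 bytes of text
 * but stores 8 bytes of data, hence the LONG_SRL halving above.
 */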

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
	END(__bzero)
#endif
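
/*
 * Example: a C call such as memset(buf, 0xab, 100) arrives with
 * a0 = buf, a1 = 0xab and a2 = 100.  The shift/or sequence in memset
 * spreads the byte into a full fill word (0xabababab, or
 * 0xabababababababab on 64-bit), __BUILD_BZERO does the actual
 * filling, and v0 returns the original buf pointer as the C prototype
 * requires.  A zero fill byte skips the spreading via "beqz a1, 1f".
 */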