arch/mips/lib/memset.S:

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
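The f_fill64 macro above is the bulk-store engine: one expansion covers a 64-byte block, as sixteen 4-byte stores when LONGSIZE == 4 or eight 8-byte stores when LONGSIZE == 8, each wrapped in EX() so a fault is routed to the named fixup handler. A minimal C sketch of one expansion, leaving out the exception-table machinery; fill64_sketch is an illustrative name, not a kernel symbol:

#include <stddef.h>

/* Fill one 64-byte block with the pre-replicated pattern 'val'.
 * Each iteration models one EX(LONG_S, ...) store; the real macro
 * is fully unrolled so a computed jump can enter it part-way in. */
static void fill64_sketch(unsigned long *dst, unsigned long val)
{
	for (size_t i = 0; i < 64 / sizeof(unsigned long); i++)
		dst[i] = val;
}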
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we execute this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm
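Inside __BUILD_BZERO, the .Lmemset_partial\@ path handles the final sub-64-byte chunk with a computed jump: it takes the address of label 2f, backs up by an offset derived from the remaining byte count (halved where a store instruction covers twice its own encoded size), and jumps into the middle of the trailing f_fill64 expansion so that exactly the remaining longs are stored. In C this corresponds to the Duff's-device pattern; a hedged sketch for the 16-store case, with illustrative names:

#include <stddef.h>

/* Execute only the last 'n' stores (n < 16) of a 16-way unrolled
 * fill; 'end' points just past the 64-byte block. Falling through
 * the cases stores in increasing address order, exactly like
 * entering the unrolled asm sequence 'n' stores before label 2. */
static void partial_fill_sketch(unsigned long *end, unsigned long val, size_t n)
{
	switch (n) {
	case 15: end[-15] = val;	/* fall through */
	case 14: end[-14] = val;	/* fall through */
	case 13: end[-13] = val;	/* fall through */
	case 12: end[-12] = val;	/* fall through */
	case 11: end[-11] = val;	/* fall through */
	case 10: end[-10] = val;	/* fall through */
	case 9:  end[-9]  = val;	/* fall through */
	case 8:  end[-8]  = val;	/* fall through */
	case 7:  end[-7]  = val;	/* fall through */
	case 6:  end[-6]  = val;	/* fall through */
	case 5:  end[-5]  = val;	/* fall through */
	case 4:  end[-4]  = val;	/* fall through */
	case 3:  end[-3]  = val;	/* fall through */
	case 2:  end[-2]  = val;	/* fall through */
	case 1:  end[-1]  = val;	/* fall through */
	case 0:  break;
	}
}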
/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
	END(__bzero)
#endif
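Both implementations build their store pattern the same way before entering the word loops: memset above spreads the fill byte with LONG_SLL/or pairs, and the alpha's ___memset below does the equivalent with insbl/sll/bis. A sketch of the doubling trick, assuming the 64-bit LONGSIZE == 8 layout; splat_byte is an illustrative name:

#include <stdint.h>

/* Each shift-and-OR step doubles the number of byte copies
 * (1 -> 2 -> 4 -> 8); a 32-bit build stops after the 16-bit step. */
static uint64_t splat_byte(uint8_t c)
{
	uint64_t v = c;		/* 0x00000000000000cc */
	v |= v << 8;		/* 0x000000000000cccc */
	v |= v << 16;		/* 0x00000000cccccccc */
	v |= v << 32;		/* 0xcccccccccccccccc */
	return v;
}

Multiplying by 0x0101010101010101 would produce the same value in one step, but the shift-and-OR chain uses only cheap ALU operations and avoids relying on a fast integer multiplier, which these older MIPS and Alpha cores lack.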
arch/alpha/lib/memset.S:

/*
 * linux/arch/alpha/lib/memset.S
 *
 * This is an efficient (and small) implementation of the C library "memset()"
 * function for the alpha.
 *
 * (C) Copyright 1996 Linus Torvalds
 *
 * This routine is "moral-ware": you are free to use it any way you wish, and
 * the only obligation I put on you is a moral one: if you make any improvements
 * to the routine, please send me your improvements for me to use similarly.
 *
 * The scheduling comments are according to the EV5 documentation (and done by
 * hand, so they might well be incorrect, please do tell me about it..)
 */
#include <linux/export.h>
	.set noat
	.set noreorder
.text
	.globl memset
	.globl __memset
	.globl ___memset
	.globl __memset16
	.globl __constant_c_memset

	.ent ___memset
.align 5
___memset:
	.frame $30,0,$26,0
	.prologue 0

	and $17,255,$1		/* E1 */
	insbl $17,1,$17		/* .. E0 */
	bis $17,$1,$17		/* E0 (p-c latency, next cycle) */
	sll $17,16,$1		/* E1 (p-c latency, next cycle) */

	bis $17,$1,$17		/* E0 (p-c latency, next cycle) */
	sll $17,32,$1		/* E1 (p-c latency, next cycle) */
	bis $17,$1,$17		/* E0 (p-c latency, next cycle) */
	ldq_u $31,0($30)	/* .. E1 */

	.align 5
__constant_c_memset:
	addq $18,$16,$6		/* E0 */
	bis $16,$16,$0		/* .. E1 */
	xor $16,$6,$1		/* E0 */
	ble $18,end		/* .. E1 */

	bic $1,7,$1		/* E0 */
	beq $1,within_one_quad	/* .. E1 (note EV5 zero-latency forwarding) */
	and $16,7,$3		/* E0 */
	beq $3,aligned		/* .. E1 (note EV5 zero-latency forwarding) */

	ldq_u $4,0($16)		/* E0 */
	bis $16,$16,$5		/* .. E1 */
	insql $17,$16,$2	/* E0 */
	subq $3,8,$3		/* .. E1 */

	addq $18,$3,$18		/* E0	$18 is new count ($3 is negative) */
	mskql $4,$16,$4		/* .. E1 (and possible load stall) */
	subq $16,$3,$16		/* E0	$16 is new aligned destination */
	bis $2,$4,$1		/* .. E1 */

	bis $31,$31,$31		/* E0 */
	ldq_u $31,0($30)	/* .. E1 */
	stq_u $1,0($5)		/* E0 */
	bis $31,$31,$31		/* .. E1 */

	.align 4
aligned:
	sra $18,3,$3		/* E0 */
	and $18,7,$18		/* .. E1 */
	bis $16,$16,$5		/* E0 */
	beq $3,no_quad		/* .. E1 */

	.align 3
loop:
	stq $17,0($5)		/* E0 */
	subq $3,1,$3		/* .. E1 */
	addq $5,8,$5		/* E0 */
	bne $3,loop		/* .. E1 */

no_quad:
	bis $31,$31,$31		/* E0 */
	beq $18,end		/* .. E1 */
	ldq $7,0($5)		/* E0 */
	mskqh $7,$6,$2		/* .. E1 (and load stall) */

	insqh $17,$6,$4		/* E0 */
	bis $2,$4,$1		/* .. E1 */
	stq $1,0($5)		/* E0 */
	ret $31,($26),1		/* .. E1 */

	.align 3
within_one_quad:
	ldq_u $1,0($16)		/* E0 */
	insql $17,$16,$2	/* E1 */
	mskql $1,$16,$4		/* E0 (after load stall) */
	bis $2,$4,$2		/* E0 */

	mskql $2,$6,$4		/* E0 */
	mskqh $1,$6,$2		/* .. E1 */
	bis $2,$4,$1		/* E0 */
	stq_u $1,0($16)		/* E0 */

end:
	ret $31,($26),1		/* E1 */
	.end ___memset
EXPORT_SYMBOL(___memset)
EXPORT_SYMBOL(__constant_c_memset)

	.align 5
	.ent __memset16
__memset16:
	.prologue 0

	inswl $17,0,$1		/* E0 */
	inswl $17,2,$2		/* E0 */
	inswl $17,4,$3		/* E0 */
	or $1,$2,$1		/* .. E1 */
	inswl $17,6,$4		/* E0 */
	or $1,$3,$1		/* .. E1 */
	or $1,$4,$17		/* E0 */
	br __constant_c_memset	/* .. E1 */

	.end __memset16
EXPORT_SYMBOL(__memset16)

memset = ___memset
__memset = ___memset
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)
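__memset16 above performs the analogous widening for a 16-bit fill value: the four inswl instructions place the halfword at byte offsets 0, 2, 4 and 6 of a zeroed quadword, the or instructions merge the copies into $17, and the code then branches into __constant_c_memset. A C sketch of the value it constructs; splat_halfword is an illustrative name:

#include <stdint.h>

/* Replicate a halfword into all four 16-bit lanes of a quadword,
 * matching the pattern __memset16 assembles in $17. */
static uint64_t splat_halfword(uint16_t h)
{
	uint64_t v = h;
	return v | (v << 16) | (v << 32) | (v << 48);
}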