/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous
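/*
 * Each EX() store records its address in the __ex_table section,
 * paired with a fixup label.  If a store faults (e.g. on a bad
 * user-space address), the exception handler branches to that fixup,
 * and the .L*_fixup\@ code further down computes how many bytes were
 * left unset, leaving the count in a2 for the caller.
 */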
	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
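/*
 * Every f_fill64 expansion stores exactly 64 bytes, i.e.
 * 64/STORSIZE stores of STORSIZE bytes each.  Besides feeding the
 * unrolled block loop, the partial-block code below also jumps into
 * the middle of one expansion, Duff's-device style, to store whatever
 * whole longs remain without looping.
 */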
	.align	5

/*
 * Macro to generate the __bzero{,_user} symbol
 * Arguments:
 * mode: LEGACY_MODE or EVA_MODE
 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1
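	/*
	 * The jr above jumps into the middle of the f_fill64 expansion
	 * below: each store in it is a 4-byte instruction writing
	 * STORSIZE bytes, so backing up (t0 * 4 / STORSIZE) bytes from
	 * the 2: label (the shifts above do that scaling) executes
	 * exactly enough stores to fill the t0-byte partial block.
	 */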
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
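/*
 * For reference, the "spread fillword" step in memset above is the
 * usual byte-replication trick; a rough C sketch of the LONGSIZE == 8
 * case:
 *
 *	unsigned long fill = c & 0xff;
 *	fill |= fill << 8;	(now 0x000000000000cccc)
 *	fill |= fill << 16;	(now 0x00000000cccccccc)
 *	fill |= fill << 32;	(now 0xcccccccccccccccc)
 */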