/* SPDX-License-Identifier: GPL-2.0 */
/* linux/arch/sparc/lib/memset.S: Sparc optimized memset, bzero and clear_user code
 * Copyright (C) 1991,1996 Free Software Foundation
 * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 *
 * Calls to memset returns initial %o0. Calls to bzero returns 0, if ok, and
 * number of bytes not yet set if exception occurs and we were called as
 * clear_user.
 */

#include <linux/export.h>
#include <asm/ptrace.h>

/* Work around cpp -rob */
#define ALLOC #alloc
#define EXECINSTR #execinstr
#define EX(x,y,a,b)                             \
98:     x,y;                                    \
        .section .fixup,ALLOC,EXECINSTR;        \
        .align  4;                              \
99:     retl;                                   \
         a, b, %o0;                             \
        .section __ex_table,ALLOC;              \
        .align  4;                              \
        .word   98b, 99b;                       \
        .text;                                  \
        .align  4

#define STORE(source, base, offset, n)          \
98:     std source, [base + offset + n];        \
        .section .fixup,ALLOC,EXECINSTR;        \
        .align  4;                              \
99:     ba 30f;                                 \
         sub %o3, n - offset, %o3;              \
        .section __ex_table,ALLOC;              \
        .align  4;                              \
        .word   98b, 99b;                       \
        .text;                                  \
        .align  4;

#define STORE_LAST(source, base, offset, n)     \
        EX(std source, [base - offset - n],     \
           add %o1, offset + n);

/* Please don't change these macros, unless you change the logic
 * in the .fixup section below as well.
 * Store 64 bytes at (BASE + OFFSET) using value SOURCE. */
#define ZERO_BIG_BLOCK(base, offset, source)    \
        STORE(source, base, offset, 0x00);      \
        STORE(source, base, offset, 0x08);      \
        STORE(source, base, offset, 0x10);      \
        STORE(source, base, offset, 0x18);      \
        STORE(source, base, offset, 0x20);      \
        STORE(source, base, offset, 0x28);      \
        STORE(source, base, offset, 0x30);      \
        STORE(source, base, offset, 0x38);

#define ZERO_LAST_BLOCKS(base, offset, source)  \
        STORE_LAST(source, base, offset, 0x38); \
        STORE_LAST(source, base, offset, 0x30); \
        STORE_LAST(source, base, offset, 0x28); \
        STORE_LAST(source, base, offset, 0x20); \
        STORE_LAST(source, base, offset, 0x18); \
        STORE_LAST(source, base, offset, 0x10); \
        STORE_LAST(source, base, offset, 0x08); \
        STORE_LAST(source, base, offset, 0x00);

        .text
        .align 4

        .globl  __bzero_begin
__bzero_begin:

        .globl  __bzero
        .type   __bzero,#function
        .globl  memset
        EXPORT_SYMBOL(__bzero)
        EXPORT_SYMBOL(memset)
memset:
        mov     %o0, %g1
        mov     1, %g4
        and     %o1, 0xff, %g3
        sll     %g3, 8, %g2
        or      %g3, %g2, %g3
        sll     %g3, 16, %g2
        or      %g3, %g2, %g3
        b       1f
         mov    %o2, %o1
3:
        cmp     %o2, 3
        be      2f
         EX(stb %g3, [%o0], sub %o1, 0)

        cmp     %o2, 2
        be      2f
         EX(stb %g3, [%o0 + 0x01], sub %o1, 1)

        EX(stb  %g3, [%o0 + 0x02], sub %o1, 2)
2:
        sub     %o2, 4, %o2
        add     %o1, %o2, %o1
        b       4f
         sub    %o0, %o2, %o0

__bzero:
        clr     %g4
        mov     %g0, %g3
1:
        cmp     %o1, 7
        bleu    7f
         andcc  %o0, 3, %o2

        bne     3b
4:
         andcc  %o0, 4, %g0

        be      2f
         mov    %g3, %g2

        EX(st   %g3, [%o0], sub %o1, 0)
        sub     %o1, 4, %o1
        add     %o0, 4, %o0
2:
        andcc   %o1, 0xffffff80, %o3    ! Now everything is 8 aligned and o1 is len to run
        be      9f
         andcc  %o1, 0x78, %o2
10:
        ZERO_BIG_BLOCK(%o0, 0x00, %g2)
        subcc   %o3, 128, %o3
        ZERO_BIG_BLOCK(%o0, 0x40, %g2)
        bne     10b
         add    %o0, 128, %o0

        orcc    %o2, %g0, %g0
9:
        be      13f
         andcc  %o1, 7, %o1

        srl     %o2, 1, %o3
        set     13f, %o4
        sub     %o4, %o3, %o4
        jmp     %o4
         add    %o0, %o2, %o0

        ZERO_LAST_BLOCKS(%o0, 0x48, %g2)
        ZERO_LAST_BLOCKS(%o0, 0x08, %g2)
13:
        be      8f
         andcc  %o1, 4, %g0

        be      1f
         andcc  %o1, 2, %g0

        EX(st   %g3, [%o0], and %o1, 7)
        add     %o0, 4, %o0
1:
        be      1f
         andcc  %o1, 1, %g0

        EX(sth  %g3, [%o0], and %o1, 3)
        add     %o0, 2, %o0
1:
        bne,a   8f
         EX(stb %g3, [%o0], and %o1, 1)
8:
        b       0f
         nop
7:
        be      13b
         orcc   %o1, 0, %g0

        be      0f
8:
         add    %o0, 1, %o0
        subcc   %o1, 1, %o1
        bne     8b
         EX(stb %g3, [%o0 - 1], add %o1, 1)
0:
        andcc   %g4, 1, %g0
        be      5f
         nop
        retl
         mov    %g1, %o0
5:
        retl
         clr    %o0

        .section .fixup,#alloc,#execinstr
        .align  4
30:
        and     %o1, 0x7f, %o1
        retl
         add    %o3, %o1, %o0

        .globl __bzero_end
__bzero_end:
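The EX() and STORE() macros above implement the recovery behaviour the header comment promises: each store that may fault gets a local label (98:), its recovery code goes into .fixup under label 99:, and the pair is recorded as a `.word 98b, 99b` entry in __ex_table. When a store traps during clear_user, the trap handler looks the faulting address up in that table and resumes at the fixup, which leaves the count of bytes not yet set in %o0. Below is a minimal C model of that lookup; the types and the linear scan are deliberate simplifications (the kernel keeps the table sorted and searches it via search_exception_tables()), and search_extable_model() is an illustrative name, not kernel API.

#include <stddef.h>
#include <stdio.h>

/* One __ex_table entry: "if the instruction at .insn faults,
 * resume execution at .fixup".  Simplified model; real entries
 * may encode relative offsets rather than absolute addresses. */
struct exception_table_entry {
        unsigned long insn;     /* address of the store that may fault */
        unsigned long fixup;    /* address of its recovery code */
};

/* Illustrative lookup only: the kernel sorts the table and
 * binary-searches it; a linear scan keeps the model short. */
static unsigned long
search_extable_model(const struct exception_table_entry *tbl,
                     size_t n, unsigned long fault_pc)
{
        for (size_t i = 0; i < n; i++)
                if (tbl[i].insn == fault_pc)
                        return tbl[i].fixup;
        return 0;       /* no entry: a fault here is a real kernel bug */
}

int main(void)
{
        /* Two fabricated entries, as EX() would emit for two stores. */
        const struct exception_table_entry table[] = {
                { 0x1000, 0x2000 },
                { 0x1008, 0x2010 },
        };

        /* A fault at 0x1008 resumes at its fixup, 0x2010. */
        printf("resume at %#lx\n", search_extable_model(table, 2, 0x1008));
        return 0;
}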
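For a tail of 8 to 120 bytes the routine does not loop: it computes how far into the unrolled ZERO_LAST_BLOCKS run it must enter (`srl %o2, 1, %o3; set 13f, %o4; sub %o4, %o3, %o4; jmp %o4`) and jumps into the middle of the store sequence, so only the stores that are actually needed execute. The MIPS implementation that follows (arch/mips/lib/memset.S) plays the same trick at .Lmemset_partial\@, jumping into the middle of an f_fill64 expansion. The portable C analogue of entering an unrolled loop part-way through is Duff's device; here is a sketch, with fill_words() a hypothetical illustration rather than either kernel routine:

#include <stddef.h>

/* Duff's device: the switch enters the unrolled store block in the
 * middle, mirroring the computed jmp/jr above.  Illustrative only. */
static void fill_words(unsigned long *p, unsigned long fill, size_t nwords)
{
        size_t rounds;

        if (nwords == 0)
                return;
        rounds = (nwords + 7) / 8;      /* passes through the 8-store block */

        switch (nwords % 8) {           /* enter mid-block for the remainder */
        case 0: do {    *p++ = fill;
        case 7:         *p++ = fill;
        case 6:         *p++ = fill;
        case 5:         *p++ = fill;
        case 4:         *p++ = fill;
        case 3:         *p++ = fill;
        case 2:         *p++ = fill;
        case 1:         *p++ = fill;
                } while (--rounds > 0);
        }
}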
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)                       \
        .if \mode == LEGACY_MODE;                       \
9:              insn    reg, addr;                      \
        .else;                                          \
9:              ___BUILD_EVA_INSN(insn, reg, addr);     \
        .endif;                                         \
        .section __ex_table,"a";                        \
        PTR     9b, handler;                            \
        .previous

        .macro  f_fill64 dst, offset, val, fixup, mode
        EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
        EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
        EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
        EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
        .endm

        .set    noreorder
        .align  5

        /*
         * Macro to generate the __bzero{,_user} symbol
         * Arguments:
         * mode: LEGACY_MODE or EVA_MODE
         */
        .macro  __BUILD_BZERO mode
        /* Initialize __memset if this is the first time we call this macro */
        .ifnotdef __memset
        .set    __memset, 1
        .hidden __memset /* Make sure it does not leak */
        .endif

        sltiu           t0, a2, STORSIZE        /* very small region? */
        bnez            t0, .Lsmall_memset\@
        andi            t0, a0, STORMASK        /* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
        move            t8, a1                  /* used by 'swp' instruction */
        move            t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
        beqz            t0, 1f
        PTR_SUBU        t0, STORSIZE            /* alignment in bytes */
#else
        .set            noat
        li              AT, STORSIZE
        beqz            t0, 1f
        PTR_SUBU        t0, AT                  /* alignment in bytes */
        .set            at
#endif

#ifndef CONFIG_CPU_MIPSR6
        R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
        EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#else
        EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#endif
        PTR_SUBU        a0, t0                  /* long align ptr */
        PTR_ADDU        a2, t0                  /* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)                           \
        EX(sb, a1, N(a0), .Lbyte_fixup\@);      \
        beqz            t0, 0f;                 \
        PTR_ADDU        t0, 1;

        PTR_ADDU        a2, t0                  /* correct size */
        PTR_ADDU        t0, 1
        STORE_BYTE(0)
        STORE_BYTE(1)
#if LONGSIZE == 4
        EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
        STORE_BYTE(2)
        STORE_BYTE(3)
        STORE_BYTE(4)
        STORE_BYTE(5)
        EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
        ori             a0, STORMASK
        xori            a0, STORMASK
        PTR_ADDIU       a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
1:      ori             t1, a2, 0x3f            /* # of full blocks */
        xori            t1, 0x3f
        beqz            t1, .Lmemset_partial\@  /* no block to fill */
        andi            t0, a2, 0x40-STORSIZE

        PTR_ADDU        t1, a0                  /* end address */
        .set            reorder
1:      PTR_ADDIU       a0, 64
        R10KCBARRIER(0(ra))
        f_fill64        a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
        bne             t1, a0, 1b
        .set            noreorder

.Lmemset_partial\@:
        R10KCBARRIER(0(ra))
        PTR_LA          t1, 2f                  /* where to start */
#ifdef CONFIG_CPU_MICROMIPS
        LONG_SRL        t7, t0, 1
#endif
#if LONGSIZE == 4
        PTR_SUBU        t1, FILLPTRG
#else
        .set            noat
        LONG_SRL        AT, FILLPTRG, 1
        PTR_SUBU        t1, AT
        .set            at
#endif
        jr              t1
        PTR_ADDU        a0, t0                  /* dest ptr */

        .set            push
        .set            noreorder
        .set            nomacro
        /* ... but first do longs ... */
        f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:      .set            pop
        andi            a2, STORMASK            /* At most one long to go */

        beqz            a2, 1f
#ifndef CONFIG_CPU_MIPSR6
        PTR_ADDU        a0, a2                  /* What's left */
        R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
        EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
        EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
        PTR_SUBU        t0, $0, a2
        PTR_ADDIU       t0, 1
        STORE_BYTE(0)
        STORE_BYTE(1)
#if LONGSIZE == 4
        EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
        STORE_BYTE(2)
        STORE_BYTE(3)
        STORE_BYTE(4)
        STORE_BYTE(5)
        EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
1:      jr              ra
        move            a2, zero

.Lsmall_memset\@:
        beqz            a2, 2f
        PTR_ADDU        t1, a0, a2

1:      PTR_ADDIU       a0, 1                   /* fill bytewise */
        R10KCBARRIER(0(ra))
        bne             t1, a0, 1b
        sb              a1, -1(a0)

2:      jr              ra                      /* done */
        move            a2, zero
        .if __memset == 1
        END(memset)
        .set __memset, 0
        .hidden __memset
        .endif

#ifdef CONFIG_CPU_MIPSR6
.Lbyte_fixup\@:
        PTR_SUBU        a2, $0, t0
        jr              ra
        PTR_ADDIU       a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
        jr              ra
        nop

.Lfwd_fixup\@:
        PTR_L           t0, TI_TASK($28)
        andi            a2, 0x3f
        LONG_L          t0, THREAD_BUADDR(t0)
        LONG_ADDU       a2, t1
        jr              ra
        LONG_SUBU       a2, t0

.Lpartial_fixup\@:
        PTR_L           t0, TI_TASK($28)
        andi            a2, STORMASK
        LONG_L          t0, THREAD_BUADDR(t0)
        LONG_ADDU       a2, t1
        jr              ra
        LONG_SUBU       a2, t0

.Llast_fixup\@:
        jr              ra
        andi            v1, a2, STORMASK

        .endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
        beqz            a1, 1f
        move            v0, a0                  /* result */

        andi            a1, 0xff                /* spread fillword */
        LONG_SLL        t1, a1, 8
        or              a1, t1
        LONG_SLL        t1, a1, 16
#if LONGSIZE == 8
        or              a1, t1
        LONG_SLL        t1, a1, 32
#endif
        or              a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
#else
FEXPORT(__bzero_kernel)
#endif
        __BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
        __BUILD_BZERO EVA_MODE
        END(__bzero)
#endif
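Both versions begin memset() by replicating the fill byte across the whole store register: the `sll`/`or` pairs at the SPARC memset entry and the `LONG_SLL`/`or` sequence under the "spread fillword" comment above perform the same repeated doubling. A small C sketch of that step (spread_fill() is an illustrative name, not either kernel's helper):

#include <limits.h>
#include <stdio.h>

/* Spread the low byte of c across every byte of an unsigned long,
 * mirroring the shift/or doubling in both memset entries above. */
static unsigned long spread_fill(int c)
{
        unsigned long fill = (unsigned long)c & 0xff;

        fill |= fill << 8;      /* 0x000000ab -> 0x0000abab */
        fill |= fill << 16;     /* 0x0000abab -> 0xabababab */
#if ULONG_MAX > 0xffffffffUL
        fill |= fill << 32;     /* 64-bit only, as in the LONGSIZE == 8 case */
#endif
        return fill;
}

int main(void)
{
        /* prints abababab on 32-bit, abababababababab on 64-bit */
        printf("%lx\n", spread_fill(0xab));
        return 0;
}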
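One detail worth calling out in the MIPSR6 path above: after the unaligned head bytes are stored one at a time, the pointer is advanced to the next STORSIZE boundary with `ori a0, STORMASK; xori a0, STORMASK; PTR_ADDIU a0, STORSIZE`. Setting the low bits and then toggling them off rounds the address down without needing ~STORMASK in a register, and adding STORSIZE then lands on the next boundary, just past the bytes already written. A worked C check of that identity, with the 8-byte store size and the sample address assumed purely for illustration:

#include <assert.h>
#include <stdint.h>

int main(void)
{
        const uintptr_t size = 8, mask = size - 1;   /* STORSIZE, STORMASK */
        uintptr_t p = 0x1003;                        /* unaligned dest ptr */

        /* ori; xori; addiu: round down, then step to the next boundary */
        uintptr_t next = ((p | mask) ^ mask) + size;

        assert(next == ((p & ~mask) + size));        /* same as masking */
        assert(next == 0x1008);
        return 0;
}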