arch/sparc/lib/memset.S:

/* SPDX-License-Identifier: GPL-2.0 */
/* linux/arch/sparc/lib/memset.S: Sparc optimized memset, bzero and clear_user code
 * Copyright (C) 1991,1996 Free Software Foundation
 * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 *
 * Calls to memset returns initial %o0. Calls to bzero return 0, if ok, and
 * number of bytes not yet set if exception occurs and we were called as
 * clear_user.
 */

#include <linux/export.h>
#include <asm/ptrace.h>

/* Work around cpp -rob */
#define ALLOC #alloc
#define EXECINSTR #execinstr
#define EX(x,y,a,b)				\
98:	x,y;					\
	.section .fixup,ALLOC,EXECINSTR;	\
	.align	4;				\
99:	retl;					\
	 a, b, %o0;				\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	98b, 99b;			\
	.text;					\
	.align	4

#define STORE(source, base, offset, n)		\
98:	std source, [base + offset + n];	\
	.section .fixup,ALLOC,EXECINSTR;	\
	.align	4;				\
99:	ba 30f;					\
	 sub %o3, n - offset, %o3;		\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	98b, 99b;			\
	.text;					\
	.align	4;

#define STORE_LAST(source, base, offset, n)	\
	EX(std source, [base - offset - n],	\
	   add %o1, offset + n);

/* Please don't change these macros, unless you change the logic
 * in the .fixup section below as well.
 * Store 64 bytes at (BASE + OFFSET) using value SOURCE. */
#define ZERO_BIG_BLOCK(base, offset, source)	\
	STORE(source, base, offset, 0x00);	\
	STORE(source, base, offset, 0x08);	\
	STORE(source, base, offset, 0x10);	\
	STORE(source, base, offset, 0x18);	\
	STORE(source, base, offset, 0x20);	\
	STORE(source, base, offset, 0x28);	\
	STORE(source, base, offset, 0x30);	\
	STORE(source, base, offset, 0x38);

#define ZERO_LAST_BLOCKS(base, offset, source)	\
	STORE_LAST(source, base, offset, 0x38);	\
	STORE_LAST(source, base, offset, 0x30);	\
	STORE_LAST(source, base, offset, 0x28);	\
	STORE_LAST(source, base, offset, 0x20);	\
	STORE_LAST(source, base, offset, 0x18);	\
	STORE_LAST(source, base, offset, 0x10);	\
	STORE_LAST(source, base, offset, 0x08);	\
	STORE_LAST(source, base, offset, 0x00);

	.text
	.align 4

	.globl	__bzero_begin
__bzero_begin:

	.globl	__bzero
	.type	__bzero,#function
	.globl	memset
	EXPORT_SYMBOL(__bzero)
	EXPORT_SYMBOL(memset)
memset:
	mov	%o0, %g1
	mov	1, %g4
	and	%o1, 0xff, %g3
	sll	%g3, 8, %g2
	or	%g3, %g2, %g3
	sll	%g3, 16, %g2
	or	%g3, %g2, %g3
	b	1f
	 mov	%o2, %o1
3:
	cmp	%o2, 3
	be	2f
	 EX(stb	%g3, [%o0], sub %o1, 0)

	cmp	%o2, 2
	be	2f
	 EX(stb	%g3, [%o0 + 0x01], sub %o1, 1)

	EX(stb	%g3, [%o0 + 0x02], sub %o1, 2)
2:
	sub	%o2, 4, %o2
	add	%o1, %o2, %o1
	b	4f
	 sub	%o0, %o2, %o0

__bzero:
	clr	%g4
	mov	%g0, %g3
1:
	cmp	%o1, 7
	bleu	7f
	 andcc	%o0, 3, %o2

	bne	3b
4:
	 andcc	%o0, 4, %g0

	be	2f
	 mov	%g3, %g2

	EX(st	%g3, [%o0], sub %o1, 0)
	sub	%o1, 4, %o1
	add	%o0, 4, %o0
2:
	andcc	%o1, 0xffffff80, %o3	! Now everything is 8 aligned and positive
	be	9f
	 andcc	%o1, 0x78, %o2
10:
	ZERO_BIG_BLOCK(%o0, 0x00, %g2)
	subcc	%o3, 128, %o3
	ZERO_BIG_BLOCK(%o0, 0x40, %g2)
	bne	10b
	 add	%o0, 128, %o0

	orcc	%o2, %g0, %g0
9:
	be	13f
	 andcc	%o1, 7, %o1

	srl	%o2, 1, %o3
	set	13f, %o4
	sub	%o4, %o3, %o4
	jmp	%o4
	 add	%o0, %o2, %o0

	ZERO_LAST_BLOCKS(%o0, 0x48, %g2)
	ZERO_LAST_BLOCKS(%o0, 0x08, %g2)
13:
	be	8f
	 andcc	%o1, 4, %g0

	be	1f
	 andcc	%o1, 2, %g0

	EX(st	%g3, [%o0], and %o1, 7)
	add	%o0, 4, %o0
1:
	be	1f
	 andcc	%o1, 1, %g0

	EX(sth	%g3, [%o0], and %o1, 3)
	add	%o0, 2, %o0
1:
	bne,a	8f
	 EX(stb	%g3, [%o0], and %o1, 1)
8:
	b	0f
	 nop
7:
	be	13b
	 orcc	%o1, 0, %g0

	be	0f
8:
	 add	%o0, 1, %o0
	subcc	%o1, 1, %o1
	bne	8b
	 EX(stb	%g3, [%o0 - 1], add %o1, 1)
0:
	andcc	%g4, 1, %g0
	be	5f
	 nop
	retl
	 mov	%g1, %o0
5:
	retl
	 clr	%o0

	.section .fixup,#alloc,#execinstr
	.align	4
30:
	and	%o1, 0x7f, %o1
	retl
	 add	%o3, %o1, %o0

	.globl __bzero_end
__bzero_end:
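The header comment above pins down the contract the fixup blocks implement: memset() returns its original %o0, while the bzero/clear_user path returns 0 on success or, when a store faults, the number of bytes that were never set. A minimal C model of that contract, assuming a hypothetical fault_at marker in place of a real faulting user store (clear_user_model is illustrative, not kernel code); the MIPS implementation below honours the same convention through its __ex_table fixups:

	#include <stddef.h>

	/* Model only: a store "faults" when it reaches fault_at. */
	static size_t clear_user_model(unsigned char *dst, size_t len,
				       const unsigned char *fault_at)
	{
		size_t done = 0;

		while (done < len) {
			if (dst + done == fault_at)	/* store would fault here */
				return len - done;	/* bytes NOT yet set */
			dst[done++] = 0;
		}
		return 0;				/* everything was zeroed */
	}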
arch/mips/lib/memset.S:

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
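f_fill64 expands to the wide stores that cover one 64-byte block, each wrapped in EX() so a fault lands in the right fixup. Ignoring the fault handling, the block is just an unrolled fill; a hedged C analog (fill64 is an illustrative name, 64-bit stores assumed):

	#include <stdint.h>

	/* One 64-byte block: eight 8-byte stores, fully unrolled. */
	static void fill64(uint64_t *dst, uint64_t val)
	{
		dst[0] = val; dst[1] = val; dst[2] = val; dst[3] = val;
		dst[4] = val; dst[5] = val; dst[6] = val; dst[7] = val;
	}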
	.set	noreorder
	.align	5

/*
 * Macro to generate the __bzero{,_user} symbol
 * Arguments:
 * mode: LEGACY_MODE or EVA_MODE
 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu	t0, a2, STORSIZE	/* very small region? */
	bnez	t0, .Lsmall_memset\@
	andi	t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move	t8, a1			/* used by 'swp' instruction */
	move	t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz	t0, 1f
	PTR_SUBU t0, STORSIZE		/* alignment in bytes */
#else
	.set	noat
	li	AT, STORSIZE
	beqz	t0, 1f
	PTR_SUBU t0, AT			/* alignment in bytes */
	.set	at
#endif

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU a0, t0			/* long align ptr */
	PTR_ADDU a2, t0			/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz	t0, 0f;				\
	PTR_ADDU t0, 1;

	PTR_ADDU a2, t0			/* correct size */
	PTR_ADDU t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori	a0, STORMASK
	xori	a0, STORMASK
	PTR_ADDIU a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
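The prologue above first diverts very small regions, then forces the destination up to a STORSIZE boundary: pre-R6 cores do it with a single unaligned swl/swr (sdl/sdr) store, while MIPSR6, which dropped those instructions, falls back to byte stores. A hedged sketch of the arithmetic in C, assuming STORSIZE == sizeof(long) (not the doubled microMIPS case) and a count already known to be at least one word; align_head is an illustrative name:

	#include <stddef.h>
	#include <stdint.h>

	/* Fill bytes up to the next word boundary, then shrink the count. */
	static void align_head(unsigned char **dst, size_t *len, unsigned char c)
	{
		size_t mis = (uintptr_t)*dst & (sizeof(long) - 1);

		for (size_t i = mis; mis && i < sizeof(long); i++, (*len)--)
			*(*dst)++ = c;
	}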
1:	ori	t1, a2, 0x3f		/* # of full blocks */
	xori	t1, 0x3f
	beqz	t1, .Lmemset_partial\@	/* no block to fill */
	andi	t0, a2, 0x40-STORSIZE

	PTR_ADDU t1, a0			/* end address */
	.set	reorder
1:	PTR_ADDIU a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne	t1, a0, 1b
	.set	noreorder

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA	t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU t1, FILLPTRG
#else
	.set	noat
	LONG_SRL AT, FILLPTRG, 1
	PTR_SUBU t1, AT
	.set	at
#endif
	jr	t1
	PTR_ADDU a0, t0			/* dest ptr */

	.set	push
	.set	noreorder
	.set	nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set	pop
	andi	a2, STORMASK		/* At most one long to go */

	beqz	a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	PTR_ADDU a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	PTR_SUBU t0, $0, a2
	PTR_ADDIU t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
1:	jr	ra
	move	a2, zero

.Lsmall_memset\@:
	beqz	a2, 2f
	PTR_ADDU t1, a0, a2

1:	PTR_ADDIU a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne	t1, a0, 1b
	EX(sb, a1, -1(a0), .Lsmall_fixup\@)

2:	jr	ra			/* done */
	move	a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif
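The .Lmemset_partial path is Duff's device in assembler: it computes an address inside the f_fill64 expansion (scaling the leftover byte count down to a store-instruction offset) and jumps there, so only the stores that are actually needed execute. A hedged C analog using switch fallthrough (fill_partial and nwords are illustrative names; 64-bit words and 0 <= nwords <= 8 assumed):

	#include <stdint.h>

	/* Store only the last nwords words ending at 'end'. */
	static void fill_partial(uint64_t *end, uint64_t val, unsigned nwords)
	{
		uint64_t *p = end - nwords;

		switch (nwords) {	/* deliberate fallthrough */
		case 8: *p++ = val;	/* fallthrough */
		case 7: *p++ = val;	/* fallthrough */
		case 6: *p++ = val;	/* fallthrough */
		case 5: *p++ = val;	/* fallthrough */
		case 4: *p++ = val;	/* fallthrough */
		case 3: *p++ = val;	/* fallthrough */
		case 2: *p++ = val;	/* fallthrough */
		case 1: *p++ = val;
		}
	}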
#ifdef CONFIG_CPU_MIPSR6
.Lbyte_fixup\@:
	PTR_SUBU a2, $0, t0
	jr	ra
	PTR_ADDIU a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
	jr	ra
	nop

.Lfwd_fixup\@:
	PTR_L	t0, TI_TASK($28)
	andi	a2, 0x3f
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU a2, t1
	jr	ra
	LONG_SUBU a2, t0

.Lpartial_fixup\@:
	PTR_L	t0, TI_TASK($28)
	andi	a2, STORMASK
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU a2, a0
	jr	ra
	LONG_SUBU a2, t0

.Llast_fixup\@:
	jr	ra
	nop

.Lsmall_fixup\@:
	PTR_SUBU a2, t1, a0
	jr	ra
	PTR_ADDIU a2, 1

	.endm
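Each fixup computes, into a2, how many bytes were still unset when a store trapped, using the bad address the trap handler saved in the thread struct (THREAD_BUADDR). For .Lfwd_fixup, which covers the 64-byte block loop, the arithmetic is: tail bytes that were never attempted plus whatever part of the blocks lies at or beyond the fault. A hedged model in C (fwd_fixup_model, block_end, and fault_addr are illustrative stand-ins for the a2/t1/BUADDR registers); .Lpartial_fixup is the same idea with the sub-block mask (STORMASK) and the current dest pointer:

	#include <stddef.h>
	#include <stdint.h>

	/* Bytes not yet set when a store inside the block loop faulted. */
	static size_t fwd_fixup_model(size_t len, uintptr_t block_end,
				      uintptr_t fault_addr)
	{
		return (len & 0x3f) + (block_end - fault_addr);
	}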
/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	beqz	a1, 1f
	move	v0, a0			/* result */

	andi	a1, 0xff		/* spread fillword */
	LONG_SLL t1, a1, 8
	or	a1, t1
	LONG_SLL t1, a1, 16
#if LONGSIZE == 8
	or	a1, t1
	LONG_SLL t1, a1, 32
#endif
	or	a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#else
FEXPORT(__bzero_kernel)
EXPORT_SYMBOL(__bzero_kernel)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
	END(__bzero)
#endif
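Before handing off to the bzero body, the memset entry point spreads the fill byte into every byte lane of a register with shift-and-or steps (8, 16, then 32 bits when LONGSIZE == 8), the same dance as the SPARC version's %g3 setup. A minimal C sketch (splat is an illustrative name):

	#include <stdint.h>

	/* Replicate the low byte across all eight byte lanes. */
	static uint64_t splat(uint64_t c)
	{
		c &= 0xff;
		c |= c << 8;	/* 2 lanes */
		c |= c << 16;	/* 4 lanes */
		c |= c << 32;	/* 8 lanes */
		return c;
	}

splat(0xab) yields 0xabababababababab; note the beqz a1, 1f above skips the spreading entirely for the common memset(p, 0, n) case.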