/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */

#include <linux/export.h>
#include <asm/alternative-asm.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/cpu.h>
#include <asm/regdef.h>
#include <asm/unwind_hints.h>

.macro fill_to_64 r0
	bstrins.d	\r0, \r0, 15, 8
	bstrins.d	\r0, \r0, 31, 16
	bstrins.d	\r0, \r0, 63, 32
.endm

.section .noinstr.text, "ax"

SYM_FUNC_START(memset)
	/*
	 * Some CPUs support hardware unaligned access
	 */
	ALTERNATIVE	"b __memset_generic", \
			"b __memset_fast", CPU_FEATURE_UAL
SYM_FUNC_END(memset)
SYM_FUNC_ALIAS(__memset, memset)

EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)

_ASM_NOKPROBE(memset)
_ASM_NOKPROBE(__memset)

/*
 * void *__memset_generic(void *s, int c, size_t n)
 *
 * a0: s
 * a1: c
 * a2: n
 */
SYM_FUNC_START(__memset_generic)
	move	a3, a0
	beqz	a2, 2f

1:	st.b	a1, a0, 0
	addi.d	a0, a0, 1
	addi.d	a2, a2, -1
	bgt	a2, zero, 1b

2:	move	a0, a3
	jr	ra
SYM_FUNC_END(__memset_generic)
_ASM_NOKPROBE(__memset_generic)
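/*
 * Illustrative note (C sketch, not part of the original source): the
 * fill_to_64 macro above replicates the low byte of a register across
 * all 64 bits, so that a single st.d writes eight copies of the fill
 * character.  Assuming an unsigned char fill value c, it is roughly:
 *
 *	unsigned long long pattern = (unsigned char)c;
 *	pattern |= pattern << 8;	// bstrins.d \r0, \r0, 15, 8
 *	pattern |= pattern << 16;	// bstrins.d \r0, \r0, 31, 16
 *	pattern |= pattern << 32;	// bstrins.d \r0, \r0, 63, 32
 */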
/*
 * void *__memset_fast(void *s, int c, size_t n)
 *
 * a0: s
 * a1: c
 * a2: n
 */
SYM_FUNC_START(__memset_fast)
	/* fill a1 to 64 bits */
	fill_to_64 a1

	sltui	t0, a2, 9
	bnez	t0, .Lsmall

	add.d	a2, a0, a2
	st.d	a1, a0, 0

	/* align up address */
	addi.d	a3, a0, 8
	bstrins.d	a3, zero, 2, 0

	addi.d	a4, a2, -64
	bgeu	a3, a4, .Llt64

	/* set 64 bytes at a time */
.Lloop64:
	st.d	a1, a3, 0
	st.d	a1, a3, 8
	st.d	a1, a3, 16
	st.d	a1, a3, 24
	st.d	a1, a3, 32
	st.d	a1, a3, 40
	st.d	a1, a3, 48
	st.d	a1, a3, 56
	addi.d	a3, a3, 64
	bltu	a3, a4, .Lloop64

	/* set the remaining bytes */
.Llt64:
	addi.d	a4, a2, -32
	bgeu	a3, a4, .Llt32
	st.d	a1, a3, 0
	st.d	a1, a3, 8
	st.d	a1, a3, 16
	st.d	a1, a3, 24
	addi.d	a3, a3, 32

.Llt32:
	addi.d	a4, a2, -16
	bgeu	a3, a4, .Llt16
	st.d	a1, a3, 0
	st.d	a1, a3, 8
	addi.d	a3, a3, 16

.Llt16:
	addi.d	a4, a2, -8
	bgeu	a3, a4, .Llt8
	st.d	a1, a3, 0

.Llt8:
	st.d	a1, a2, -8

	/* return */
	jr	ra

	.align	4
.Lsmall:
	pcaddi	t0, 4
	slli.d	a2, a2, 4
	add.d	t0, t0, a2
	jr	t0

	.align	4
0:	jr	ra

	.align	4
1:	st.b	a1, a0, 0
	jr	ra

	.align	4
2:	st.h	a1, a0, 0
	jr	ra

	.align	4
3:	st.h	a1, a0, 0
	st.b	a1, a0, 2
	jr	ra

	.align	4
4:	st.w	a1, a0, 0
	jr	ra

	.align	4
5:	st.w	a1, a0, 0
	st.b	a1, a0, 4
	jr	ra

	.align	4
6:	st.w	a1, a0, 0
	st.h	a1, a0, 4
	jr	ra

	.align	4
7:	st.w	a1, a0, 0
	st.w	a1, a0, 3
	jr	ra

	.align	4
8:	st.d	a1, a0, 0
	jr	ra
SYM_FUNC_END(__memset_fast)
_ASM_NOKPROBE(__memset_fast)

STACK_FRAME_NON_STANDARD __memset_fast
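/*
 * Illustrative note (C sketch, not part of the original source): in the
 * .Lsmall path above, "pcaddi t0, 4" points t0 at the first 16-byte
 * stub and "slli.d a2, a2, 4" turns the byte count n (0..8) into an
 * offset of n * 16, so the computed jump behaves like this switch on n
 * (c below is the 64-bit replicated fill pattern, kernel-style types):
 *
 *	switch (n) {
 *	case 0:							break;
 *	case 1: *(u8  *)s = c;					break;
 *	case 2: *(u16 *)s = c;					break;
 *	case 3: *(u16 *)s = c; *((u8 *)s + 2) = c;		break;
 *	case 4: *(u32 *)s = c;					break;
 *	case 5: *(u32 *)s = c; *((u8 *)s + 4) = c;		break;
 *	case 6: *(u32 *)s = c; *(u16 *)((u8 *)s + 4) = c;	break;
 *	case 7: *(u32 *)s = c; *(u32 *)((u8 *)s + 3) = c;	break;
 *	case 8: *(u64 *)s = c;					break;
 *	}
 *
 * Case 7 deliberately overlaps the two word stores (bytes 0..3 and
 * 3..6) so seven bytes are covered without a branch.
 *
 * The MIPS implementation below provides the same memset() interface,
 * plus the __bzero/__bzero_kernel user-clearing variants.
 */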
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.set	noreorder
	.align	5
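/*
 * Illustrative note (C sketch, not part of the original source): a
 * single f_fill64 expansion stores 64 bytes of the replicated fill
 * value, with an exception-table entry attached to every store via
 * EX().  Ignoring the microMIPS paired-store case, it is roughly:
 *
 *	for (int i = 0; i < 64 / STORSIZE; i++)		// fully unrolled above
 *		*(long *)(dst + offset + i * STORSIZE) = val;
 *
 * If a store faults on a user address, control is transferred to the
 * handler named by \fixup, which reports how many bytes were left
 * unwritten.
 */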
	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro __BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz	t0, 0f;				\
	PTR_ADDU t0, 1;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial\@	/* no block to fill */
	 andi		t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b
	.set		noreorder

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	 PTR_ADDU	a0, t0			/* dest ptr */

	.set		push
	.set		noreorder
	.set		nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set		pop
	andi		a2, STORMASK		/* At most one long to go */

	beqz		a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	 PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	 PTR_SUBU	t0, $0, a2
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
1:	jr		ra
	 move		a2, zero

.Lsmall_memset\@:
	beqz		a2, 2f
	 PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)

2:	jr		ra			/* done */
	 move		a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_MIPSR6
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	jr		ra
	 PTR_ADDIU	a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra
	 nop

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	 LONG_SUBU	a2, t0

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	jr		ra
	 LONG_SUBU	a2, t0

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra
	 nop

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	.set		reorder
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra
	.set		noreorder

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	beqz		a1, 1f
	 move		v0, a0			/* result */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#else
FEXPORT(__bzero_kernel)
EXPORT_SYMBOL(__bzero_kernel)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
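/*
 * Illustrative note (C sketch, not part of the original source): every
 * .L*_fixup handler above leaves the number of bytes that were NOT
 * written in a2 before returning.  That is the convention the kernel's
 * __clear_user() relies on when it calls the __bzero entry points for
 * user memory, roughly:
 *
 *	size_t left = __bzero(user_ptr, n);	// bytes that could not be zeroed
 *	size_t cleared = n - left;		// 0 < left <= n after a fault
 *
 * Plain memset(), by contrast, returns the destination pointer, which
 * is moved into v0 at the top of LEAF(memset).
 */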