/*
 *  arch/xtensa/lib/strncpy_user.S
 *
 *  This file is subject to the terms and conditions of the GNU General
 *  Public License.  See the file "COPYING" in the main directory of
 *  this archive for more details.
 *
 *  Returns: -EFAULT if exception before terminator, N if the entire
 *  buffer filled, else strlen.
 *
 *  Copyright (C) 2002 Tensilica Inc.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <asm/asmmacro.h>
#include <asm/core.h>

/*
 * char *__strncpy_user(char *dst, const char *src, size_t len)
 */

#ifdef __XTENSA_EB__
# define MASK0 0xff000000
# define MASK1 0x00ff0000
# define MASK2 0x0000ff00
# define MASK3 0x000000ff
#else
# define MASK0 0x000000ff
# define MASK1 0x0000ff00
# define MASK2 0x00ff0000
# define MASK3 0xff000000
#endif

# Register use
#   a0/ return address
#   a1/ stack pointer
#   a2/ return value
#   a3/ src
#   a4/ len
#   a5/ mask0
#   a6/ mask1
#   a7/ mask2
#   a8/ mask3
#   a9/ tmp
#   a10/ tmp
#   a11/ dst

	.text
ENTRY(__strncpy_user)

	abi_entry_default
	# a2/ dst, a3/ src, a4/ len
	mov	a11, a2		# leave dst in return value register
	beqz	a4, .Lret	# if len is zero
	movi	a5, MASK0	# mask for byte 0
	movi	a6, MASK1	# mask for byte 1
	movi	a7, MASK2	# mask for byte 2
	movi	a8, MASK3	# mask for byte 3
	bbsi.l	a3, 0, .Lsrc1mod2	# if only  8-bit aligned
	bbsi.l	a3, 1, .Lsrc2mod4	# if only 16-bit aligned

.Lsrcaligned:	# return here when src is word-aligned
	srli	a10, a4, 2	# number of loop iterations with 4B per loop
	movi	a9, 3
	bnone	a11, a9, .Laligned
	j	.Ldstunaligned

.Lsrc1mod2:	# src address is odd
EX(11f)	l8ui	a9, a3, 0		# get byte 0
	addi	a3, a3, 1		# advance src pointer
EX(10f)	s8i	a9, a11, 0		# store byte 0
	beqz	a9, .Lret		# if byte 0 is zero
	addi	a11, a11, 1		# advance dst pointer
	addi	a4, a4, -1		# decrement len
	beqz	a4, .Lret		# if len is zero
	bbci.l	a3, 1, .Lsrcaligned	# if src is now word-aligned

.Lsrc2mod4:	# src address is 2 mod 4
EX(11f)	l8ui	a9, a3, 0		# get byte 0
	/* 1-cycle interlock */
EX(10f)	s8i	a9, a11, 0		# store byte 0
	beqz	a9, .Lret		# if byte 0 is zero
	addi	a11, a11, 1		# advance dst pointer
	addi	a4, a4, -1		# decrement len
	beqz	a4, .Lret		# if len is zero
EX(11f)	l8ui	a9, a3, 1		# get byte 1
	addi	a3, a3, 2		# advance src pointer
EX(10f)	s8i	a9, a11, 0		# store byte 1
	beqz	a9, .Lret		# if byte is zero
	addi	a11, a11, 1		# advance dst pointer
	addi	a4, a4, -1		# decrement len
	bnez	a4, .Lsrcaligned	# if len is nonzero

.Lret:
	sub	a2, a11, a2		# compute strlen
	abi_ret_default
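/*
 * The aligned loop below scans one word (four bytes) per iteration and
 * uses BNONE with the per-byte masks MASK0..MASK3 to spot a NUL
 * terminator inside the word before it is stored.  A minimal C sketch
 * of that per-lane test, reusing the MASK* macros defined above
 * (first_zero_byte() is a hypothetical helper for illustration only,
 * not part of this file):
 *
 *	static int first_zero_byte(unsigned int w)
 *	{
 *		if ((w & MASK0) == 0) return 0;	// byte 0 is zero
 *		if ((w & MASK1) == 0) return 1;	// byte 1 is zero
 *		if ((w & MASK2) == 0) return 2;	// byte 2 is zero
 *		if ((w & MASK3) == 0) return 3;	// byte 3 is zero
 *		return -1;			// no NUL in this word
 *	}
 */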
/*
 * dst is word-aligned, src is word-aligned
 */
	.align	4		# 1 mod 4 alignment for LOOPNEZ
	.byte	0		# (0 mod 4 alignment for LBEG)
.Laligned:
#if XCHAL_HAVE_LOOPS
	loopnez	a10, .Loop1done
#else
	beqz	a10, .Loop1done
	slli	a10, a10, 2
	add	a10, a10, a11	# a10 = end of last 4B chunk
#endif /* XCHAL_HAVE_LOOPS */
.Loop1:
EX(11f)	l32i	a9, a3, 0		# get word from src
	addi	a3, a3, 4		# advance src pointer
	bnone	a9, a5, .Lz0		# if byte 0 is zero
	bnone	a9, a6, .Lz1		# if byte 1 is zero
	bnone	a9, a7, .Lz2		# if byte 2 is zero
EX(10f)	s32i	a9, a11, 0		# store word to dst
	bnone	a9, a8, .Lz3		# if byte 3 is zero
	addi	a11, a11, 4		# advance dst pointer
#if !XCHAL_HAVE_LOOPS
	blt	a11, a10, .Loop1
#endif

.Loop1done:
	bbci.l	a4, 1, .L100
	# copy 2 bytes
EX(11f)	l16ui	a9, a3, 0
	addi	a3, a3, 2		# advance src pointer
#ifdef __XTENSA_EB__
	bnone	a9, a7, .Lz0		# if byte 2 is zero
	bnone	a9, a8, .Lz1		# if byte 3 is zero
#else
	bnone	a9, a5, .Lz0		# if byte 0 is zero
	bnone	a9, a6, .Lz1		# if byte 1 is zero
#endif
EX(10f)	s16i	a9, a11, 0
	addi	a11, a11, 2		# advance dst pointer
.L100:
	bbci.l	a4, 0, .Lret
EX(11f)	l8ui	a9, a3, 0
	/* slot */
EX(10f)	s8i	a9, a11, 0
	beqz	a9, .Lret		# if byte is zero
	addi	a11, a11, 1-3		# advance dst pointer 1, but also cancel
					# the effect of adding 3 in .Lz3 code
	/* fall thru to .Lz3 and "retw" */

.Lz3:	# byte 3 is zero
	addi	a11, a11, 3		# advance dst pointer
	sub	a2, a11, a2		# compute strlen
	abi_ret_default
.Lz0:	# byte 0 is zero
#ifdef __XTENSA_EB__
	movi	a9, 0
#endif /* __XTENSA_EB__ */
EX(10f)	s8i	a9, a11, 0
	sub	a2, a11, a2		# compute strlen
	abi_ret_default
.Lz1:	# byte 1 is zero
#ifdef __XTENSA_EB__
	extui	a9, a9, 16, 16
#endif /* __XTENSA_EB__ */
EX(10f)	s16i	a9, a11, 0
	addi	a11, a11, 1		# advance dst pointer
	sub	a2, a11, a2		# compute strlen
	abi_ret_default
.Lz2:	# byte 2 is zero
#ifdef __XTENSA_EB__
	extui	a9, a9, 16, 16
#endif /* __XTENSA_EB__ */
EX(10f)	s16i	a9, a11, 0
	movi	a9, 0
EX(10f)	s8i	a9, a11, 2
	addi	a11, a11, 2		# advance dst pointer
	sub	a2, a11, a2		# compute strlen
	abi_ret_default

	.align	4		# 1 mod 4 alignment for LOOPNEZ
	.byte	0		# (0 mod 4 alignment for LBEG)
.Ldstunaligned:
	/*
	 * for now just use byte copy loop
	 */
#if XCHAL_HAVE_LOOPS
	loopnez	a4, .Lunalignedend
#else
	beqz	a4, .Lunalignedend
	add	a10, a11, a4		# a10 = ending address
#endif /* XCHAL_HAVE_LOOPS */
.Lnextbyte:
EX(11f)	l8ui	a9, a3, 0
	addi	a3, a3, 1
EX(10f)	s8i	a9, a11, 0
	beqz	a9, .Lunalignedend
	addi	a11, a11, 1
#if !XCHAL_HAVE_LOOPS
	blt	a11, a10, .Lnextbyte
#endif

.Lunalignedend:
	sub	a2, a11, a2		# compute strlen
	abi_ret_default

ENDPROC(__strncpy_user)
EXPORT_SYMBOL(__strncpy_user)
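/*
 * Caller-visible contract, restating the header comment: the return
 * value is the copied string length, len if the buffer filled with no
 * terminator found, or -EFAULT if a user access faulted.  A minimal C
 * model of the success paths (an illustrative sketch only; the
 * faulting case is handled by the fixup section below, not by C code):
 *
 *	static long strncpy_user_model(char *dst, const char *src,
 *				       unsigned long len)
 *	{
 *		unsigned long n;
 *
 *		for (n = 0; n < len; n++) {
 *			dst[n] = src[n];
 *			if (!src[n])
 *				return n;	// strlen (NUL was copied)
 *		}
 *		return len;	// entire buffer filled, no terminator
 *	}
 */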
	.section .fixup, "ax"
	.align	4

	/* For now, just return -EFAULT.  Future implementations might
	 * like to clear remaining kernel space, like the fixup
	 * implementation in memset().  Thus, we differentiate between
	 * load/store fixups. */

10:
11:
	movi	a2, -EFAULT
	abi_ret_default
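/*
 * Note on the EX(10f)/EX(11f) annotations used above: each one records
 * the address of the following user-space load or store in the kernel
 * exception table, so a fault on that access resumes at fixup label 10
 * or 11 instead of oopsing.  Conceptually, each table entry pairs a
 * faulting instruction address with its fixup address; a hedged C
 * sketch of such an entry (the real layout is arch-specific and may
 * use relative offsets):
 *
 *	struct exception_table_entry {
 *		unsigned long insn;	// address of the EX()-marked access
 *		unsigned long fixup;	// e.g. label 10 or 11 above
 *	};
 */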