/*
 *  arch/xtensa/lib/strnlen_user.S
 *
 *  This file is subject to the terms and conditions of the GNU General
 *  Public License.  See the file "COPYING" in the main directory of
 *  this archive for more details.
 *
 *  Returns strnlen, including trailing zero terminator.
 *  Zero indicates error.
 *
 *  Copyright (C) 2002 Tensilica Inc.
 */

#include <linux/linkage.h>
#include <asm/asmmacro.h>
#include <asm/core.h>

/*
 * size_t __strnlen_user(const char *s, size_t len)
 */

#ifdef __XTENSA_EB__
# define MASK0 0xff000000
# define MASK1 0x00ff0000
# define MASK2 0x0000ff00
# define MASK3 0x000000ff
#else
# define MASK0 0x000000ff
# define MASK1 0x0000ff00
# define MASK2 0x00ff0000
# define MASK3 0xff000000
#endif

# Register use:
#   a2/ src
#   a3/ len
#   a4/ tmp
#   a5/ mask0
#   a6/ mask1
#   a7/ mask2
#   a8/ mask3
#   a9/ tmp
#   a10/ tmp

	.text
ENTRY(__strnlen_user)

	abi_entry_default
	# a2/ s, a3/ len
	addi	a4, a2, -4	# because we overincrement at the end;
				# we compensate with load offsets of 4
	movi	a5, MASK0	# mask for byte 0
	movi	a6, MASK1	# mask for byte 1
	movi	a7, MASK2	# mask for byte 2
	movi	a8, MASK3	# mask for byte 3
	bbsi.l	a2, 0, .L1mod2	# if only 8-bit aligned
	bbsi.l	a2, 1, .L2mod4	# if only 16-bit aligned

/*
 * String is word-aligned.
 */
.Laligned:
	srli	a10, a3, 2	# number of loop iterations with 4B per loop
#if XCHAL_HAVE_LOOPS
	loopnez	a10, .Ldone
#else
	beqz	a10, .Ldone
	slli	a10, a10, 2
	add	a10, a10, a4	# a10 = end of last 4B chunk
#endif /* XCHAL_HAVE_LOOPS */
.Loop:
EX(10f)	l32i	a9, a4, 4	# get next word of string
	addi	a4, a4, 4	# advance string pointer
	bnone	a9, a5, .Lz0	# if byte 0 is zero
	bnone	a9, a6, .Lz1	# if byte 1 is zero
	bnone	a9, a7, .Lz2	# if byte 2 is zero
	bnone	a9, a8, .Lz3	# if byte 3 is zero
#if !XCHAL_HAVE_LOOPS
	blt	a4, a10, .Loop
#endif

.Ldone:
EX(10f)	l32i	a9, a4, 4	# load 4 bytes

	bbci.l	a3, 1, .L100
	# check two more bytes (bytes 0, 1 of word)
	addi	a4, a4, 2	# advance string pointer
	bnone	a9, a5, .Lz0	# if byte 0 is zero
	bnone	a9, a6, .Lz1	# if byte 1 is zero
.L100:
	bbci.l	a3, 0, .L101
	# check one more byte (byte 2 of word)
	# Actually, we don't need to check.  Zero or nonzero, we'll add one.
	#  Do not add an extra one for the NULL terminator since we have
	#  exhausted the original len parameter.
	addi	a4, a4, 1	# advance string pointer
.L101:
	sub	a2, a4, a2	# compute length
	abi_ret_default

# NOTE that in several places below, we point just past
# the zero byte in order to include the NULL terminator in the count.

.Lz3:	# byte 3 is zero
	addi	a4, a4, 3	# point to zero byte
.Lz0:	# byte 0 is zero
	addi	a4, a4, 1	# point just beyond zero byte
	sub	a2, a4, a2	# subtract to obtain length
	abi_ret_default
.Lz1:	# byte 1 is zero
	addi	a4, a4, 1+1	# point just beyond zero byte
	sub	a2, a4, a2	# subtract to obtain length
	abi_ret_default
.Lz2:	# byte 2 is zero
	addi	a4, a4, 2+1	# point just beyond zero byte
	sub	a2, a4, a2	# subtract to obtain length
	abi_ret_default

.L1mod2:	# address is odd
EX(10f)	l8ui	a9, a4, 4	# get byte 0
	addi	a4, a4, 1	# advance string pointer
	beqz	a9, .Lz3	# if byte 0 is zero
	bbci.l	a4, 1, .Laligned # if string pointer is now word-aligned

.L2mod4:	# address is 2 mod 4
	addi	a4, a4, 2	# advance ptr
EX(10f)	l32i	a9, a4, 0	# get word with first two bytes of string
	bnone	a9, a7, .Lz2	# if byte 2 (of word, not string) is zero
	bany	a9, a8, .Laligned # if byte 3 (of word, not string) is nonzero
	# byte 3 is zero
	addi	a4, a4, 3+1	# point just beyond zero byte
	sub	a2, a4, a2	# subtract to obtain length
	abi_ret_default

ENDPROC(__strnlen_user)
EXPORT_SYMBOL(__strnlen_user)

	.section .fixup, "ax"
	.align	4
10:
	movi	a2, 0
	abi_ret_default
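The aligned loop above finds the terminating NUL a word at a time: each 32-bit load is tested against four single-byte masks, and bnone branches as soon as the masked byte is all zeroes. What follows is a minimal C sketch of that test, not kernel code: the function name first_zero_byte is made up here, and little-endian byte order is assumed to match the !__XTENSA_EB__ mask definitions above.

#include <stdint.h>

/*
 * Illustrative sketch only (not kernel code): the per-byte mask test the
 * aligned loop performs with bnone.  The masks mirror the little-endian
 * MASK0..MASK3 values above; the function name is hypothetical.
 * Returns the index of the first zero byte in the word, or -1 if none.
 */
static int first_zero_byte(uint32_t word)
{
	static const uint32_t mask[4] = {
		0x000000ffu,	/* MASK0: byte 0, lowest address on little-endian */
		0x0000ff00u,	/* MASK1: byte 1 */
		0x00ff0000u,	/* MASK2: byte 2 */
		0xff000000u	/* MASK3: byte 3 */
	};
	int i;

	for (i = 0; i < 4; i++)
		if ((word & mask[i]) == 0)	/* bnone a9, MASKi, .Lzi */
			return i;
	return -1;			/* no NUL in this word, keep looping */
}

On a little-endian machine the word loaded from the bytes "ab\0d" is 0x64006261, and first_zero_byte() returns 2, the offset of the NUL within the word. The MIPS counterpart shown next, arch/mips/lib/strnlen_user.S, skips the word-at-a-time trick entirely and scans byte by byte, relying on an exception-table fixup (its EX() macro) to return 0 when a user load faults.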
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1996, 1998, 1999, 2004 by Ralf Baechle
 * Copyright (c) 1999 Silicon Graphics, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#define EX(insn,reg,addr,handler)			\
9:	insn	reg, addr;				\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

/*
 * Return the size of a string including the ending NUL character up to a
 * maximum of a1 or 0 in case of error.
 *
 * Note: for performance reasons we deliberately accept that a user may
 *	 make strlen_user and strnlen_user access the first few KSEG0
 *	 bytes.  There's nothing secret there.  On 64-bit accessing beyond
 *	 the maximum is a tad hairier ...
 */
LEAF(__strnlen_user_asm)
	move		v0, a0
	PTR_ADDU	a1, a0			# stop pointer
1:
#ifdef CONFIG_CPU_DADDI_WORKAROUNDS
	.set		noat
	li		AT, 1
#endif
	beq		v0, a1, 1f		# limit reached?
#ifdef CONFIG_EVA
	.set push
	.set eva
	EX(lbe, t0, (v0), .Lfault)
	.set pop
#else
	EX(lb, t0, (v0), .Lfault)
#endif
	.set		noreorder
	bnez		t0, 1b
1:
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	PTR_ADDIU	v0, 1
#else
	PTR_ADDU	v0, AT
	.set		at
#endif
	.set		reorder
	PTR_SUBU	v0, a0
	jr		ra
	END(__strnlen_user_asm)

.Lfault:
	move		v0, zero
	jr		ra

EXPORT_SYMBOL(__strnlen_user_asm)
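Both routines implement the contract stated in their header comments: return the length of the user string including the terminating NUL, stop scanning at the caller-supplied limit, and return 0 if a user access faults. The fault handling goes through the exception table in each case: the EX() wrappers record the faulting load and a fixup address, and the fixup code (label 10 in the xtensa file, .Lfault in the MIPS file) returns 0. Below is a minimal C sketch of that contract, not kernel code: the function name strnlen_with_nul is made up, the fault path is omitted because plain C cannot express it, and the value returned when no NUL appears within the limit is simply capped here, whereas the assembly versions encode that case in their own pointer arithmetic.

#include <stddef.h>

/*
 * Illustrative sketch only (not kernel code): the string-length contract the
 * two assembly routines implement, minus the fault path.  The count includes
 * the terminating NUL; scanning never goes past maxlen bytes.
 */
static size_t strnlen_with_nul(const char *s, size_t maxlen)
{
	size_t n;

	for (n = 0; n < maxlen; n++)
		if (s[n] == '\0')
			return n + 1;	/* count includes the terminating NUL */
	return maxlen;			/* limit reached before a NUL was seen */
}

For example, strnlen_with_nul("hi", 16) returns 3: two characters plus the terminating NUL.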