
TOMOYO Linux Cross Reference
Linux/arch/arm64/lib/memset.S


Diff markup

Differences between /arch/arm64/lib/memset.S (Architecture arm64) and /arch/mips/lib/memset.S (Architecture mips)
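
Both versions open the same way: before any store loop runs, the fill byte is
broadcast across a full machine word (A_l via the orr/lsl chain at left column
lines 47-50, a1 via the LONG_SLL/or chain at right column lines 304-312). A
minimal C sketch of that trick, for illustration only (the helper name is
ours, not the kernel's):

    #include <stdint.h>

    /* Spread one fill byte across a 64-bit word, as both listings do
     * before entering their bulk store loops. */
    static uint64_t spread_fill_byte(unsigned char c)
    {
            uint64_t v = c;      /* and A_lw, val, #255  /  andi a1, 0xff */
            v |= v << 8;         /* orr A_lw, A_lw, A_lw, lsl #8          */
            v |= v << 16;        /* orr A_lw, A_lw, A_lw, lsl #16         */
            v |= v << 32;        /* orr A_l, A_l, A_l, lsl #32 (64-bit)   */
            return v;            /* e.g. c = 0xab -> 0xabababababababab   */
    }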


  1 /* SPDX-License-Identifier: GPL-2.0-only */    << 
  2 /*                                                  1 /*
  3  * Copyright (C) 2013 ARM Ltd.                 !!   2  * This file is subject to the terms and conditions of the GNU General Public
  4  * Copyright (C) 2013 Linaro.                  !!   3  * License.  See the file "COPYING" in the main directory of this archive
                                                   >>   4  * for more details.
  5  *                                                  5  *
  6  * This code is based on glibc cortex strings work originally authored by Linaro  !!   6  * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
  7  * be found @                                  !!   7  * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
  8  *                                             !!   8  * Copyright (C) 2007 by Maciej W. Rozycki
  9  * http://bazaar.launchpad.net/~linaro-toolchain-unsorted/cortex-strings/trunk/  !!   9  * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 10  * files/head:/src/aarch64/                    << 
 11  */                                                10  */
                                                   >>  11 #include <linux/export.h>
                                                   >>  12 #include <asm/asm.h>
                                                   >>  13 #include <asm/asm-offsets.h>
                                                   >>  14 #include <asm/regdef.h>
                                                   >>  15 
                                                   >>  16 #if LONGSIZE == 4
                                                   >>  17 #define LONG_S_L swl
                                                   >>  18 #define LONG_S_R swr
                                                   >>  19 #else
                                                   >>  20 #define LONG_S_L sdl
                                                   >>  21 #define LONG_S_R sdr
                                                   >>  22 #endif
                                                   >>  23 
                                                   >>  24 #ifdef CONFIG_CPU_MICROMIPS
                                                   >>  25 #define STORSIZE (LONGSIZE * 2)
                                                   >>  26 #define STORMASK (STORSIZE - 1)
                                                   >>  27 #define FILL64RG t8
                                                   >>  28 #define FILLPTRG t7
                                                   >>  29 #undef  LONG_S
                                                   >>  30 #define LONG_S LONG_SP
                                                   >>  31 #else
                                                   >>  32 #define STORSIZE LONGSIZE
                                                   >>  33 #define STORMASK LONGMASK
                                                   >>  34 #define FILL64RG a1
                                                   >>  35 #define FILLPTRG t0
                                                   >>  36 #endif
 12                                                    37 
 13 #include <linux/linkage.h>                     !!  38 #define LEGACY_MODE 1
 14 #include <asm/assembler.h>                     !!  39 #define EVA_MODE    2
 15 #include <asm/cache.h>                         << 
 16                                                    40 
 17 /*                                                 41 /*
 18  * Fill in the buffer with character c (alignment handled by the hardware)  !!  42  * No need to protect it with EVA #ifdefery. The generated block of code
 19  *                                             !!  43  * will never be assembled if EVA is not enabled.
 20  * Parameters:                                 << 
 21  *      x0 - buf                               << 
 22  *      x1 - c                                 << 
 23  *      x2 - n                                 << 
 24  * Returns:                                    << 
 25  *      x0 - buf                               << 
 26  */                                                44  */
                                                   >>  45 #define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
                                                   >>  46 #define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
 27                                                    47 
 28 dstin           .req    x0                     !!  48 #define EX(insn,reg,addr,handler)                       \
 29 val             .req    w1                     !!  49         .if \mode == LEGACY_MODE;                       \
 30 count           .req    x2                     !!  50 9:              insn    reg, addr;                      \
 31 tmp1            .req    x3                     !!  51         .else;                                          \
 32 tmp1w           .req    w3                     !!  52 9:              ___BUILD_EVA_INSN(insn, reg, addr);     \
 33 tmp2            .req    x4                     !!  53         .endif;                                         \
 34 tmp2w           .req    w4                     !!  54         .section __ex_table,"a";                        \
 35 zva_len_x       .req    x5                     !!  55         PTR_WD  9b, handler;                            \
 36 zva_len         .req    w5                     !!  56         .previous
 37 zva_bits_x      .req    x6                     !!  57 
 38                                                !!  58         .macro  f_fill64 dst, offset, val, fixup, mode
 39 A_l             .req    x7                     !!  59         EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
 40 A_lw            .req    w7                     !!  60         EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
 41 dst             .req    x8                     !!  61         EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
 42 tmp3w           .req    w9                     !!  62         EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
 43 tmp3            .req    x9                     !!  63 #if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
 44                                                !!  64         EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
 45 SYM_FUNC_START(__pi_memset)                    !!  65         EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
 46         mov     dst, dstin      /* Preserve return value.  */  !!  66         EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
 47         and     A_lw, val, #255                !!  67         EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
 48         orr     A_lw, A_lw, A_lw, lsl #8       !!  68 #endif
 49         orr     A_lw, A_lw, A_lw, lsl #16      !!  69 #if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
 50         orr     A_l, A_l, A_l, lsl #32         !!  70         EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
 51                                                !!  71         EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
 52         cmp     count, #15                     !!  72         EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
 53         b.hi    .Lover16_proc                  !!  73         EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
 54         /*All store maybe are non-aligned..*/  !!  74         EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
 55         tbz     count, #3, 1f                  !!  75         EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
 56         str     A_l, [dst], #8                 !!  76         EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
 57 1:                                             !!  77         EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
 58         tbz     count, #2, 2f                  !!  78 #endif
 59         str     A_lw, [dst], #4                !!  79         .endm
 60 2:                                             << 
 61         tbz     count, #1, 3f                  << 
 62         strh    A_lw, [dst], #2                << 
 63 3:                                             << 
 64         tbz     count, #0, 4f                  << 
 65         strb    A_lw, [dst]                    << 
 66 4:                                             << 
 67         ret                                    << 
 68                                                << 
 69 .Lover16_proc:                                 << 
 70         /*Whether  the start address is aligned with 16.*/  <<
 71         neg     tmp2, dst                      << 
 72         ands    tmp2, tmp2, #15                << 
 73         b.eq    .Laligned                      << 
 74 /*                                             << 
 75 * The count is not less than 16, we can use stp to store the start 16 bytes,  <<
 76 * then adjust the dst aligned with 16.This process will make the current  <<
 77 * memory address at alignment boundary.        << 
 78 */                                             << 
 79         stp     A_l, A_l, [dst] /*non-aligned store..*/  <<
 80         /*make the dst aligned..*/             << 
 81         sub     count, count, tmp2             << 
 82         add     dst, dst, tmp2                 << 
 83                                                << 
 84 .Laligned:                                     << 
 85         cbz     A_l, .Lzero_mem                << 
 86                                                << 
 87 .Ltail_maybe_long:                             << 
 88         cmp     count, #64                     << 
 89         b.ge    .Lnot_short                    << 
 90 .Ltail63:                                      << 
 91         ands    tmp1, count, #0x30             << 
 92         b.eq    3f                             << 
 93         cmp     tmp1w, #0x20                   << 
 94         b.eq    1f                             << 
 95         b.lt    2f                             << 
 96         stp     A_l, A_l, [dst], #16           << 
 97 1:                                             << 
 98         stp     A_l, A_l, [dst], #16           << 
 99 2:                                             << 
100         stp     A_l, A_l, [dst], #16           << 
101 /*                                             << 
102 * The last store length is less than 16,use stp to write last 16 bytes.  <<
103 * It will lead some bytes written twice and the access is non-aligned.  <<
104 */                                             << 
105 3:                                             << 
106         ands    count, count, #15              << 
107         cbz     count, 4f                      << 
108         add     dst, dst, count                << 
109         stp     A_l, A_l, [dst, #-16]   /* Repeat some bytes. */  <<
110 4:                                             << 
111         ret                                    << 
112                                                    80 
113         /*                                     !!  81         .align  5
114         * Critical loop. Start at a new cache line boundary. Assuming  <<
115         * 64 bytes per line, this ensures the entire loop is in one line.  <<
116         */                                     << 
117         .p2align        L1_CACHE_SHIFT         << 
118 .Lnot_short:                                   << 
119         sub     dst, dst, #16/* Pre-bias.  */  << 
120         sub     count, count, #64              << 
121 1:                                             << 
122         stp     A_l, A_l, [dst, #16]           << 
123         stp     A_l, A_l, [dst, #32]           << 
124         stp     A_l, A_l, [dst, #48]           << 
125         stp     A_l, A_l, [dst, #64]!          << 
126         subs    count, count, #64              << 
127         b.ge    1b                             << 
128         tst     count, #0x3f                   << 
129         add     dst, dst, #16                  << 
130         b.ne    .Ltail63                       << 
131 .Lexitfunc:                                    << 
132         ret                                    << 
133                                                    82 
134         /*                                         83         /*
135         * For zeroing memory, check to see if we can use the ZVA feature to  !!  84          * Macro to generate the __bzero{,_user} symbol
136         * zero entire 'cache' lines.           !!  85          * Arguments:
137         */                                     !!  86          * mode: LEGACY_MODE or EVA_MODE
138 .Lzero_mem:                                    !!  87          */
139         cmp     count, #63                     !!  88         .macro __BUILD_BZERO mode
140         b.le    .Ltail63                       !!  89         /* Initialize __memset if this is the first time we call this macro */
141         /*                                     !!  90         .ifnotdef __memset
142         * For zeroing small amounts of memory, it's not worth setting up  !!  91         .set __memset, 1
143         * the line-clear code.                 !!  92         .hidden __memset /* Make sure it does not leak */
144         */                                     !!  93         .endif
145         cmp     count, #128                    !!  94 
146         b.lt    .Lnot_short /*count is at least  128 bytes*/  !!  95         sltiu           t0, a2, STORSIZE        /* very small region? */
147                                                !!  96         .set            noreorder
148         mrs     tmp1, dczid_el0                !!  97         bnez            t0, .Lsmall_memset\@
149         tbnz    tmp1, #4, .Lnot_short          !!  98          andi           t0, a0, STORMASK        /* aligned? */
150         mov     tmp3w, #4                      !!  99         .set            reorder
151         and     zva_len, tmp1w, #15     /* Safety: other bits reserved.  */  !! 100 
152         lsl     zva_len, tmp3w, zva_len        !! 101 #ifdef CONFIG_CPU_MICROMIPS
                                                   >> 102         move            t8, a1                  /* used by 'swp' instruction */
                                                   >> 103         move            t9, a1
                                                   >> 104 #endif
                                                   >> 105         .set            noreorder
                                                   >> 106 #ifndef CONFIG_CPU_DADDI_WORKAROUNDS
                                                   >> 107         beqz            t0, 1f
                                                   >> 108          PTR_SUBU       t0, STORSIZE            /* alignment in bytes */
                                                   >> 109 #else
                                                   >> 110         .set            noat
                                                   >> 111         li              AT, STORSIZE
                                                   >> 112         beqz            t0, 1f
                                                   >> 113          PTR_SUBU       t0, AT                  /* alignment in bytes */
                                                   >> 114         .set            at
                                                   >> 115 #endif
                                                   >> 116         .set            reorder
                                                   >> 117 
                                                   >> 118 #ifndef CONFIG_CPU_NO_LOAD_STORE_LR
                                                   >> 119         R10KCBARRIER(0(ra))
                                                   >> 120 #ifdef __MIPSEB__
                                                   >> 121         EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
                                                   >> 122 #else
                                                   >> 123         EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
                                                   >> 124 #endif
                                                   >> 125         PTR_SUBU        a0, t0                  /* long align ptr */
                                                   >> 126         PTR_ADDU        a2, t0                  /* correct size */
                                                   >> 127 
                                                   >> 128 #else /* CONFIG_CPU_NO_LOAD_STORE_LR */
                                                   >> 129 #define STORE_BYTE(N)                           \
                                                   >> 130         EX(sb, a1, N(a0), .Lbyte_fixup\@);      \
                                                   >> 131         .set            noreorder;              \
                                                   >> 132         beqz            t0, 0f;                 \
                                                   >> 133          PTR_ADDU       t0, 1;                  \
                                                   >> 134         .set            reorder;
                                                   >> 135 
                                                   >> 136         PTR_ADDU        a2, t0                  /* correct size */
                                                   >> 137         PTR_ADDU        t0, 1
                                                   >> 138         STORE_BYTE(0)
                                                   >> 139         STORE_BYTE(1)
                                                   >> 140 #if LONGSIZE == 4
                                                   >> 141         EX(sb, a1, 2(a0), .Lbyte_fixup\@)
                                                   >> 142 #else
                                                   >> 143         STORE_BYTE(2)
                                                   >> 144         STORE_BYTE(3)
                                                   >> 145         STORE_BYTE(4)
                                                   >> 146         STORE_BYTE(5)
                                                   >> 147         EX(sb, a1, 6(a0), .Lbyte_fixup\@)
                                                   >> 148 #endif
                                                   >> 149 0:
                                                   >> 150         ori             a0, STORMASK
                                                   >> 151         xori            a0, STORMASK
                                                   >> 152         PTR_ADDIU       a0, STORSIZE
                                                   >> 153 #endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
                                                   >> 154 1:      ori             t1, a2, 0x3f            /* # of full blocks */
                                                   >> 155         xori            t1, 0x3f
                                                   >> 156         andi            t0, a2, 0x40-STORSIZE
                                                   >> 157         beqz            t1, .Lmemset_partial\@  /* no block to fill */
                                                   >> 158 
                                                   >> 159         PTR_ADDU        t1, a0                  /* end address */
                                                   >> 160 1:      PTR_ADDIU       a0, 64
                                                   >> 161         R10KCBARRIER(0(ra))
                                                   >> 162         f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
                                                   >> 163         bne             t1, a0, 1b
                                                   >> 164 
                                                   >> 165 .Lmemset_partial\@:
                                                   >> 166         R10KCBARRIER(0(ra))
                                                   >> 167         PTR_LA          t1, 2f                  /* where to start */
                                                   >> 168 #ifdef CONFIG_CPU_MICROMIPS
                                                   >> 169         LONG_SRL        t7, t0, 1
                                                   >> 170 #endif
                                                   >> 171 #if LONGSIZE == 4
                                                   >> 172         PTR_SUBU        t1, FILLPTRG
                                                   >> 173 #else
                                                   >> 174         .set            noat
                                                   >> 175         LONG_SRL        AT, FILLPTRG, 1
                                                   >> 176         PTR_SUBU        t1, AT
                                                   >> 177         .set            at
                                                   >> 178 #endif
                                                   >> 179         PTR_ADDU        a0, t0                  /* dest ptr */
                                                   >> 180         jr              t1
                                                   >> 181 
                                                   >> 182         /* ... but first do longs ... */
                                                   >> 183         f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
                                                   >> 184 2:      andi            a2, STORMASK            /* At most one long to go */
                                                   >> 185 
                                                   >> 186         .set            noreorder
                                                   >> 187         beqz            a2, 1f
                                                   >> 188 #ifndef CONFIG_CPU_NO_LOAD_STORE_LR
                                                   >> 189          PTR_ADDU       a0, a2                  /* What's left */
                                                   >> 190         .set            reorder
                                                   >> 191         R10KCBARRIER(0(ra))
                                                   >> 192 #ifdef __MIPSEB__
                                                   >> 193         EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
                                                   >> 194 #else
                                                   >> 195         EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
                                                   >> 196 #endif
                                                   >> 197 #else /* CONFIG_CPU_NO_LOAD_STORE_LR */
                                                   >> 198          PTR_SUBU       t0, $0, a2
                                                   >> 199         .set            reorder
                                                   >> 200         move            a2, zero                /* No remaining longs */
                                                   >> 201         PTR_ADDIU       t0, 1
                                                   >> 202         STORE_BYTE(0)
                                                   >> 203         STORE_BYTE(1)
                                                   >> 204 #if LONGSIZE == 4
                                                   >> 205         EX(sb, a1, 2(a0), .Lbyte_fixup\@)
                                                   >> 206 #else
                                                   >> 207         STORE_BYTE(2)
                                                   >> 208         STORE_BYTE(3)
                                                   >> 209         STORE_BYTE(4)
                                                   >> 210         STORE_BYTE(5)
                                                   >> 211         EX(sb, a1, 6(a0), .Lbyte_fixup\@)
                                                   >> 212 #endif
                                                   >> 213 0:
                                                   >> 214 #endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
                                                   >> 215 1:      move            a2, zero
                                                   >> 216         jr              ra
                                                   >> 217 
                                                   >> 218 .Lsmall_memset\@:
                                                   >> 219         PTR_ADDU        t1, a0, a2
                                                   >> 220         beqz            a2, 2f
                                                   >> 221 
                                                   >> 222 1:      PTR_ADDIU       a0, 1                   /* fill bytewise */
                                                   >> 223         R10KCBARRIER(0(ra))
                                                   >> 224         .set            noreorder
                                                   >> 225         bne             t1, a0, 1b
                                                   >> 226          EX(sb, a1, -1(a0), .Lsmall_fixup\@)
                                                   >> 227         .set            reorder
                                                   >> 228 
                                                   >> 229 2:      move            a2, zero
                                                   >> 230         jr              ra                      /* done */
                                                   >> 231         .if __memset == 1
                                                   >> 232         END(memset)
                                                   >> 233         .set __memset, 0
                                                   >> 234         .hidden __memset
                                                   >> 235         .endif
153                                                   236 
154         ands    tmp3w, zva_len, #63            !! 237 #ifdef CONFIG_CPU_NO_LOAD_STORE_LR
                                                   >> 238 .Lbyte_fixup\@:
155         /*                                        239         /*
156         * ensure the zva_len is not less than 64.  !! 240          * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
157         * It is not meaningful to use ZVA if the block size is less than 64.  !! 241          *      a2     =             a2                -              t0                   + 1
158         */                                     !! 242          */
159         b.ne    .Lnot_short                    !! 243         PTR_SUBU        a2, t0
160 .Lzero_by_line:                                !! 244         PTR_ADDIU       a2, 1
                                                   >> 245         jr              ra
                                                   >> 246 #endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
                                                   >> 247 
                                                   >> 248 .Lfirst_fixup\@:
                                                   >> 249         /* unset_bytes already in a2 */
                                                   >> 250         jr      ra
                                                   >> 251 
                                                   >> 252 .Lfwd_fixup\@:
161         /*                                        253         /*
162         * Compute how far we need to go to become suitably aligned. We're  !! 254          * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
163         * already at quad-word alignment.      !! 255          *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
164         */                                     !! 256          */
165         cmp     count, zva_len_x               !! 257         PTR_L           t0, TI_TASK($28)
166         b.lt    .Lnot_short             /* Not enough to reach alignment.  */  !! 258         andi            a2, 0x3f
167         sub     zva_bits_x, zva_len_x, #1      !! 259         LONG_L          t0, THREAD_BUADDR(t0)
168         neg     tmp2, dst                      !! 260         LONG_ADDU       a2, t1
169         ands    tmp2, tmp2, zva_bits_x         !! 261         LONG_SUBU       a2, t0
170         b.eq    2f                      /* Already aligned.  */  !! 262         jr              ra
171         /* Not aligned, check that there's enough to copy after alignment.*/  !! 263 
172         sub     tmp1, count, tmp2              !! 264 .Lpartial_fixup\@:
173         /*                                        265         /*
174         * grantee the remain length to be ZVA safe to  !! 266          * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
175         * avoid to make the 2f's process over mem range.*/  !! 267          *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
176         cmp     tmp1, #64                      !! 268          */
177         ccmp    tmp1, zva_len_x, #8, ge /* NZCV=0b1000 */  !! 269         PTR_L           t0, TI_TASK($28)
178         b.lt    .Lnot_short                    !! 270         andi            a2, STORMASK
                                                   >> 271         LONG_L          t0, THREAD_BUADDR(t0)
                                                   >> 272         LONG_ADDU       a2, a0
                                                   >> 273         LONG_SUBU       a2, t0
                                                   >> 274         jr              ra
                                                   >> 275 
                                                   >> 276 .Llast_fixup\@:
                                                   >> 277         /* unset_bytes already in a2 */
                                                   >> 278         jr              ra
                                                   >> 279 
                                                   >> 280 .Lsmall_fixup\@:
179         /*                                        281         /*
180         * We know that there's at least 64 bytes to zero and that it's safe  !! 282          * unset_bytes = end_addr - current_addr + 1
181         * to overrun by 64 bytes.              !! 283          *      a2     =    t1    -      a0      + 1
182         */                                     !! 284          */
183         mov     count, tmp1                    !! 285         PTR_SUBU        a2, t1, a0
184 1:                                             !! 286         PTR_ADDIU       a2, 1
185         stp     A_l, A_l, [dst]                !! 287         jr              ra
186         stp     A_l, A_l, [dst, #16]           << 
187         stp     A_l, A_l, [dst, #32]           << 
188         subs    tmp2, tmp2, #64                << 
189         stp     A_l, A_l, [dst, #48]           << 
190         add     dst, dst, #64                  << 
191         b.ge    1b                             << 
192         /* We've overrun a bit, so adjust dst downwards.*/  <<
193         add     dst, dst, tmp2                 << 
194 2:                                             << 
195         sub     count, count, zva_len_x        << 
196 3:                                             << 
197         dc      zva, dst                       << 
198         add     dst, dst, zva_len_x            << 
199         subs    count, count, zva_len_x        << 
200         b.ge    3b                             << 
201         ands    count, count, zva_bits_x       << 
202         b.ne    .Ltail_maybe_long              << 
203         ret                                    << 
204 SYM_FUNC_END(__pi_memset)                      << 
205                                                   288 
206 SYM_FUNC_ALIAS(__memset, __pi_memset)          !! 289         .endm
207 EXPORT_SYMBOL(__memset)                        << 
208                                                   290 
209 SYM_FUNC_ALIAS_WEAK(memset, __pi_memset)       !! 291 /*
                                                   >> 292  * memset(void *s, int c, size_t n)
                                                   >> 293  *
                                                   >> 294  * a0: start of area to clear
                                                   >> 295  * a1: char to fill with
                                                   >> 296  * a2: size of area to clear
                                                   >> 297  */
                                                   >> 298 
                                                   >> 299 LEAF(memset)
210 EXPORT_SYMBOL(memset)                             300 EXPORT_SYMBOL(memset)
                                                   >> 301         move            v0, a0                  /* result */
                                                   >> 302         beqz            a1, 1f
                                                   >> 303 
                                                   >> 304         andi            a1, 0xff                /* spread fillword */
                                                   >> 305         LONG_SLL                t1, a1, 8
                                                   >> 306         or              a1, t1
                                                   >> 307         LONG_SLL                t1, a1, 16
                                                   >> 308 #if LONGSIZE == 8
                                                   >> 309         or              a1, t1
                                                   >> 310         LONG_SLL                t1, a1, 32
                                                   >> 311 #endif
                                                   >> 312         or              a1, t1
                                                   >> 313 1:
                                                   >> 314 #ifndef CONFIG_EVA
                                                   >> 315 FEXPORT(__bzero)
                                                   >> 316 EXPORT_SYMBOL(__bzero)
                                                   >> 317 #endif
                                                   >> 318         __BUILD_BZERO LEGACY_MODE
                                                   >> 319 
                                                   >> 320 #ifdef CONFIG_EVA
                                                   >> 321 LEAF(__bzero)
                                                   >> 322 EXPORT_SYMBOL(__bzero)
                                                   >> 323         __BUILD_BZERO EVA_MODE
                                                   >> 324 END(__bzero)
                                                   >> 325 #endif
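
On the arm64 side, the zeroing path (.Lzero_mem, left column lines 138
onward) reads DCZID_EL0 to decide whether DC ZVA can clear whole blocks:
bit 4 (DZP) prohibits the instruction, and bits 3:0 give the log2 block
size in 4-byte words. A hedged C sketch of that decode (the helper name
is ours):

    #include <stdint.h>

    /* Returns the DC ZVA block size in bytes, or 0 if DC ZVA is
     * prohibited -- mirrors "tbnz tmp1, #4, .Lnot_short" and
     * "lsl zva_len, tmp3w, zva_len" in the left listing. */
    static unsigned int zva_block_bytes(uint64_t dczid_el0)
    {
            if (dczid_el0 & (1u << 4))      /* DZP: DC ZVA not allowed */
                    return 0;
            return 4u << (dczid_el0 & 15);  /* 4 << BS, e.g. BS=4 -> 64 */
    }

The listing then refuses to use ZVA when this works out to less than 64
bytes (the "ands tmp3w, zva_len, #63" check), since the line-clear setup
would not pay off.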
                                                      

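The MIPS fault-fixup labels (.Lfwd_fixup, .Lpartial_fixup, .Lsmall_fixup,
right column lines 248-287) all compute the same thing for the caller: how
many bytes were left unset when a store faulted, returned in a2. For
illustration, the .Lpartial_fixup arithmetic in C (names and the STORMASK
value are our assumptions; BUADDR is the faulting address the kernel records
for the thread):

    #include <stdint.h>

    #define STORMASK 7  /* assumption: LONGSIZE == 8, no microMIPS */

    /* a2 = a0 + (a2 & STORMASK) - THREAD_BUADDR($28->task), matching
     * the .Lpartial_fixup comment in the right listing. */
    static uintptr_t partial_fixup_unset_bytes(uintptr_t end_addr,   /* a0 */
                                               uintptr_t nbytes,     /* a2 */
                                               uintptr_t fault_addr) /* BUADDR */
    {
            return end_addr + (nbytes & STORMASK) - fault_addr;
    }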