
TOMOYO Linux Cross Reference
Linux/arch/sparc/lib/memcpy.S


Diff markup

Differences between /arch/sparc/lib/memcpy.S in linux-6.12-rc7 and in linux-4.10.17. The two versions differ only in the file header: linux-6.12-rc7 adds the SPDX-License-Identifier line and includes <linux/export.h> (followed by a blank line) where linux-4.10.17 included <asm/export.h>; the assembly body is otherwise identical.


--- arch/sparc/lib/memcpy.S	(linux-4.10.17)
+++ arch/sparc/lib/memcpy.S	(linux-6.12-rc7)
@@ -1,461 +1,463 @@
+/* SPDX-License-Identifier: GPL-2.0 */
 /* memcpy.S: Sparc optimized memcpy and memmove code
  * Hand optimized from GNU libc's memcpy and memmove
  * Copyright (C) 1991,1996 Free Software Foundation
  * Copyright (C) 1995 Linus Torvalds (Linus.Torvalds@helsinki.fi)
  * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
  * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be)
  * Copyright (C) 1996 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
  */
 
-#include <asm/export.h>
+#include <linux/export.h>
+
 #define FUNC(x)                 \
         .globl  x;              \
         .type   x,@function;    \
         .align  4;              \
 x:
 
 /* Both these macros have to start with exactly the same insn */
 #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
         ldd     [%src + (offset) + 0x00], %t0; \
         ldd     [%src + (offset) + 0x08], %t2; \
         ldd     [%src + (offset) + 0x10], %t4; \
         ldd     [%src + (offset) + 0x18], %t6; \
         st      %t0, [%dst + (offset) + 0x00]; \
         st      %t1, [%dst + (offset) + 0x04]; \
         st      %t2, [%dst + (offset) + 0x08]; \
         st      %t3, [%dst + (offset) + 0x0c]; \
         st      %t4, [%dst + (offset) + 0x10]; \
         st      %t5, [%dst + (offset) + 0x14]; \
         st      %t6, [%dst + (offset) + 0x18]; \
         st      %t7, [%dst + (offset) + 0x1c];
 
 #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
         ldd     [%src + (offset) + 0x00], %t0; \
         ldd     [%src + (offset) + 0x08], %t2; \
         ldd     [%src + (offset) + 0x10], %t4; \
         ldd     [%src + (offset) + 0x18], %t6; \
         std     %t0, [%dst + (offset) + 0x00]; \
         std     %t2, [%dst + (offset) + 0x08]; \
         std     %t4, [%dst + (offset) + 0x10]; \
         std     %t6, [%dst + (offset) + 0x18];
 
 #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \
         ldd     [%src - (offset) - 0x10], %t0; \
         ldd     [%src - (offset) - 0x08], %t2; \
         st      %t0, [%dst - (offset) - 0x10]; \
         st      %t1, [%dst - (offset) - 0x0c]; \
         st      %t2, [%dst - (offset) - 0x08]; \
         st      %t3, [%dst - (offset) - 0x04];
 
 #define MOVE_LASTALIGNCHUNK(src, dst, offset, t0, t1, t2, t3) \
         ldd     [%src - (offset) - 0x10], %t0; \
         ldd     [%src - (offset) - 0x08], %t2; \
         std     %t0, [%dst - (offset) - 0x10]; \
         std     %t2, [%dst - (offset) - 0x08];
 
 #define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \
         ldub    [%src - (offset) - 0x02], %t0; \
         ldub    [%src - (offset) - 0x01], %t1; \
         stb     %t0, [%dst - (offset) - 0x02]; \
         stb     %t1, [%dst - (offset) - 0x01];
 
         .text
         .align  4
 
 FUNC(memmove)
 EXPORT_SYMBOL(memmove)
         cmp             %o0, %o1
         mov             %o0, %g7
         bleu            9f
          sub            %o0, %o1, %o4
 
         add             %o1, %o2, %o3
         cmp             %o3, %o0
         bleu            0f
          andcc          %o4, 3, %o5
 
         add             %o1, %o2, %o1
         add             %o0, %o2, %o0
         sub             %o1, 1, %o1
         sub             %o0, 1, %o0
 
 1:      /* reverse_bytes */
 
         ldub            [%o1], %o4
         subcc           %o2, 1, %o2
         stb             %o4, [%o0]
         sub             %o1, 1, %o1
         bne             1b
          sub            %o0, 1, %o0
 
         retl
          mov            %g7, %o0
 
 /* NOTE: This code is executed just for the cases,
          where %src (=%o1) & 3 is != 0.
          We need to align it to 4. So, for (%src & 3)
          1 we need to do ldub,lduh
          2 lduh
          3 just ldub
          so even if it looks weird, the branches
          are correct here. -jj
  */
 78:     /* dword_align */
 
         andcc           %o1, 1, %g0
         be              4f
          andcc          %o1, 2, %g0
 
         ldub            [%o1], %g2
         add             %o1, 1, %o1
         stb             %g2, [%o0]
         sub             %o2, 1, %o2
         bne             3f
          add            %o0, 1, %o0
 4:
         lduh            [%o1], %g2
         add             %o1, 2, %o1
         sth             %g2, [%o0]
         sub             %o2, 2, %o2
         b               3f
          add            %o0, 2, %o0
 
 FUNC(memcpy)    /* %o0=dst %o1=src %o2=len */
 EXPORT_SYMBOL(memcpy)
 
         sub             %o0, %o1, %o4
         mov             %o0, %g7
 9:
         andcc           %o4, 3, %o5
 0:
         bne             86f
          cmp            %o2, 15
 
         bleu            90f
          andcc          %o1, 3, %g0
 
         bne             78b
 3:
          andcc          %o1, 4, %g0
 
         be              2f
          mov            %o2, %g1
 
         ld              [%o1], %o4
         sub             %g1, 4, %g1
         st              %o4, [%o0]
         add             %o1, 4, %o1
         add             %o0, 4, %o0
 2:
         andcc           %g1, 0xffffff80, %g0
         be              3f
          andcc          %o0, 4, %g0
 
         be              82f + 4
 5:
         MOVE_BIGCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
         MOVE_BIGCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)
         MOVE_BIGCHUNK(o1, o0, 0x40, o2, o3, o4, o5, g2, g3, g4, g5)
         MOVE_BIGCHUNK(o1, o0, 0x60, o2, o3, o4, o5, g2, g3, g4, g5)
         sub             %g1, 128, %g1
         add             %o1, 128, %o1
         cmp             %g1, 128
         bge             5b
          add            %o0, 128, %o0
 3:
         andcc           %g1, 0x70, %g4
         be              80f
          andcc          %g1, 8, %g0
 
         sethi           %hi(80f), %o5
         srl             %g4, 1, %o4
         add             %g4, %o4, %o4
         add             %o1, %g4, %o1
         sub             %o5, %o4, %o5
         jmpl            %o5 + %lo(80f), %g0
          add            %o0, %g4, %o0
 
 79:     /* memcpy_table */
 
         MOVE_LASTCHUNK(o1, o0, 0x60, g2, g3, g4, g5)
         MOVE_LASTCHUNK(o1, o0, 0x50, g2, g3, g4, g5)
         MOVE_LASTCHUNK(o1, o0, 0x40, g2, g3, g4, g5)
         MOVE_LASTCHUNK(o1, o0, 0x30, g2, g3, g4, g5)
         MOVE_LASTCHUNK(o1, o0, 0x20, g2, g3, g4, g5)
         MOVE_LASTCHUNK(o1, o0, 0x10, g2, g3, g4, g5)
         MOVE_LASTCHUNK(o1, o0, 0x00, g2, g3, g4, g5)
 
 80:     /* memcpy_table_end */
         be              81f
          andcc          %g1, 4, %g0
 
         ldd             [%o1], %g2
         add             %o0, 8, %o0
         st              %g2, [%o0 - 0x08]
         add             %o1, 8, %o1
         st              %g3, [%o0 - 0x04]
 
 81:     /* memcpy_last7 */
 
         be              1f
          andcc          %g1, 2, %g0
 
         ld              [%o1], %g2
         add             %o1, 4, %o1
         st              %g2, [%o0]
         add             %o0, 4, %o0
 1:
         be              1f
          andcc          %g1, 1, %g0
 
         lduh            [%o1], %g2
         add             %o1, 2, %o1
         sth             %g2, [%o0]
         add             %o0, 2, %o0
 1:
         be              1f
          nop
 
         ldub            [%o1], %g2
         stb             %g2, [%o0]
 1:
         retl
          mov            %g7, %o0
 
 82:     /* ldd_std */
         MOVE_BIGALIGNCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
         MOVE_BIGALIGNCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)
         MOVE_BIGALIGNCHUNK(o1, o0, 0x40, o2, o3, o4, o5, g2, g3, g4, g5)
         MOVE_BIGALIGNCHUNK(o1, o0, 0x60, o2, o3, o4, o5, g2, g3, g4, g5)
         subcc           %g1, 128, %g1
         add             %o1, 128, %o1
         cmp             %g1, 128
         bge             82b
          add            %o0, 128, %o0
 
         andcc           %g1, 0x70, %g4
         be              84f
          andcc          %g1, 8, %g0
 
         sethi           %hi(84f), %o5
         add             %o1, %g4, %o1
         sub             %o5, %g4, %o5
         jmpl            %o5 + %lo(84f), %g0
          add            %o0, %g4, %o0
 
 83:     /* amemcpy_table */
 
         MOVE_LASTALIGNCHUNK(o1, o0, 0x60, g2, g3, g4, g5)
         MOVE_LASTALIGNCHUNK(o1, o0, 0x50, g2, g3, g4, g5)
         MOVE_LASTALIGNCHUNK(o1, o0, 0x40, g2, g3, g4, g5)
         MOVE_LASTALIGNCHUNK(o1, o0, 0x30, g2, g3, g4, g5)
         MOVE_LASTALIGNCHUNK(o1, o0, 0x20, g2, g3, g4, g5)
         MOVE_LASTALIGNCHUNK(o1, o0, 0x10, g2, g3, g4, g5)
         MOVE_LASTALIGNCHUNK(o1, o0, 0x00, g2, g3, g4, g5)
 
 84:     /* amemcpy_table_end */
         be              85f
          andcc          %g1, 4, %g0
 
         ldd             [%o1], %g2
         add             %o0, 8, %o0
         std             %g2, [%o0 - 0x08]
         add             %o1, 8, %o1
 85:     /* amemcpy_last7 */
         be              1f
          andcc          %g1, 2, %g0
 
         ld              [%o1], %g2
         add             %o1, 4, %o1
         st              %g2, [%o0]
         add             %o0, 4, %o0
 1:
         be              1f
          andcc          %g1, 1, %g0
 
         lduh            [%o1], %g2
         add             %o1, 2, %o1
         sth             %g2, [%o0]
         add             %o0, 2, %o0
 1:
         be              1f
          nop
 
         ldub            [%o1], %g2
         stb             %g2, [%o0]
 1:
         retl
          mov            %g7, %o0
 
 86:     /* non_aligned */
         cmp             %o2, 6
         bleu            88f
          nop
 
         save            %sp, -96, %sp
         andcc           %i0, 3, %g0
         be              61f
          andcc          %i0, 1, %g0
         be              60f
          andcc          %i0, 2, %g0
 
         ldub            [%i1], %g5
         add             %i1, 1, %i1
         stb             %g5, [%i0]
         sub             %i2, 1, %i2
         bne             61f
          add            %i0, 1, %i0
 60:
         ldub            [%i1], %g3
         add             %i1, 2, %i1
         stb             %g3, [%i0]
         sub             %i2, 2, %i2
         ldub            [%i1 - 1], %g3
         add             %i0, 2, %i0
         stb             %g3, [%i0 - 1]
 61:
         and             %i1, 3, %g2
         and             %i2, 0xc, %g3
         and             %i1, -4, %i1
         cmp             %g3, 4
         sll             %g2, 3, %g4
         mov             32, %g2
         be              4f
          sub            %g2, %g4, %l0
 
         blu             3f
          cmp            %g3, 0x8
 
         be              2f
          srl            %i2, 2, %g3
 
         ld              [%i1], %i3
         add             %i0, -8, %i0
         ld              [%i1 + 4], %i4
         b               8f
          add            %g3, 1, %g3
 2:
         ld              [%i1], %i4
         add             %i0, -12, %i0
         ld              [%i1 + 4], %i5
         add             %g3, 2, %g3
         b               9f
          add            %i1, -4, %i1
 3:
         ld              [%i1], %g1
         add             %i0, -4, %i0
         ld              [%i1 + 4], %i3
         srl             %i2, 2, %g3
         b               7f
          add            %i1, 4, %i1
 4:
         ld              [%i1], %i5
         cmp             %i2, 7
         ld              [%i1 + 4], %g1
         srl             %i2, 2, %g3
         bleu            10f
          add            %i1, 8, %i1
 
         ld              [%i1], %i3
         add             %g3, -1, %g3
 5:
         sll             %i5, %g4, %g2
         srl             %g1, %l0, %g5
         or              %g2, %g5, %g2
         st              %g2, [%i0]
 7:
         ld              [%i1 + 4], %i4
         sll             %g1, %g4, %g2
         srl             %i3, %l0, %g5
         or              %g2, %g5, %g2
         st              %g2, [%i0 + 4]
 8:
         ld              [%i1 + 8], %i5
         sll             %i3, %g4, %g2
         srl             %i4, %l0, %g5
         or              %g2, %g5, %g2
         st              %g2, [%i0 + 8]
 9:
         ld              [%i1 + 12], %g1
         sll             %i4, %g4, %g2
         srl             %i5, %l0, %g5
         addcc           %g3, -4, %g3
         or              %g2, %g5, %g2
         add             %i1, 16, %i1
         st              %g2, [%i0 + 12]
         add             %i0, 16, %i0
         bne,a           5b
          ld             [%i1], %i3
 10:
         sll             %i5, %g4, %g2
         srl             %g1, %l0, %g5
         srl             %l0, 3, %g3
         or              %g2, %g5, %g2
         sub             %i1, %g3, %i1
         andcc           %i2, 2, %g0
         st              %g2, [%i0]
         be              1f
          andcc          %i2, 1, %g0
 
         ldub            [%i1], %g2
         add             %i1, 2, %i1
         stb             %g2, [%i0 + 4]
         add             %i0, 2, %i0
         ldub            [%i1 - 1], %g2
         stb             %g2, [%i0 + 3]
 1:
         be              1f
          nop
         ldub            [%i1], %g2
         stb             %g2, [%i0 + 4]
 1:
         ret
          restore        %g7, %g0, %o0
 
 88:     /* short_end */
 
         and             %o2, 0xe, %o3
 20:
         sethi           %hi(89f), %o5
         sll             %o3, 3, %o4
         add             %o0, %o3, %o0
         sub             %o5, %o4, %o5
         add             %o1, %o3, %o1
         jmpl            %o5 + %lo(89f), %g0
          andcc          %o2, 1, %g0
 
         MOVE_SHORTCHUNK(o1, o0, 0x0c, g2, g3)
         MOVE_SHORTCHUNK(o1, o0, 0x0a, g2, g3)
         MOVE_SHORTCHUNK(o1, o0, 0x08, g2, g3)
         MOVE_SHORTCHUNK(o1, o0, 0x06, g2, g3)
         MOVE_SHORTCHUNK(o1, o0, 0x04, g2, g3)
         MOVE_SHORTCHUNK(o1, o0, 0x02, g2, g3)
         MOVE_SHORTCHUNK(o1, o0, 0x00, g2, g3)
 
 89:     /* short_table_end */
 
         be              1f
          nop
 
         ldub            [%o1], %g2
         stb             %g2, [%o0]
 1:
         retl
          mov            %g7, %o0
 
 90:     /* short_aligned_end */
         bne             88b
          andcc          %o2, 8, %g0
 
         be              1f
          andcc          %o2, 4, %g0
 
         ld              [%o1 + 0x00], %g2
         ld              [%o1 + 0x04], %g3
         add             %o1, 8, %o1
         st              %g2, [%o0 + 0x00]
         st              %g3, [%o0 + 0x04]
         add             %o0, 8, %o0
 1:
         b               81b
          mov            %o2, %g1
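A note for readers of the listing above (this note and the expansion below are editorial annotations, not part of the kernel file): the 128-byte main loops at labels 5: (the word-aligned memcpy path) and 82: (ldd_std, the doubleword-aligned path) are built entirely from the macros defined at the top of the file. Assuming ordinary C-preprocessor substitution of the arguments, one MOVE_BIGCHUNK invocation from the loop expands roughly as follows; ldd fills an even/odd register pair, so the four loads bring in 32 bytes per macro:

        ! Illustrative expansion (not in the source) of:
        !   MOVE_BIGCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
        ldd     [%o1 + 0x00], %o2       ! bytes 0x00-0x07 -> %o2/%o3
        ldd     [%o1 + 0x08], %o4       ! bytes 0x08-0x0f -> %o4/%o5
        ldd     [%o1 + 0x10], %g2       ! bytes 0x10-0x17 -> %g2/%g3
        ldd     [%o1 + 0x18], %g4       ! bytes 0x18-0x1f -> %g4/%g5
        st      %o2, [%o0 + 0x00]       ! store the eight words one at a time
        st      %o3, [%o0 + 0x04]
        st      %o4, [%o0 + 0x08]
        st      %o5, [%o0 + 0x0c]
        st      %g2, [%o0 + 0x10]
        st      %g3, [%o0 + 0x14]
        st      %g4, [%o0 + 0x18]
        st      %g5, [%o0 + 0x1c]

Each loop iteration issues four such chunks at offsets 0x00, 0x20, 0x40 and 0x60, which is why the length and both pointers are adjusted by 128 per pass; MOVE_BIGALIGNCHUNK moves the same 32 bytes but stores them with four std doubleword stores instead.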

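The computed branches into the MOVE_LASTCHUNK and MOVE_LASTALIGNCHUNK tables (after labels 3: and 82: in the listing) rely on every table entry having a fixed code size. The fragment below is an editorial reading of that arithmetic, reproducing the instructions from the listing with added comments:

        ! %g1 holds the length left after the 128-byte loop; %g4 = %g1 & 0x70 is
        ! that leftover rounded down to a multiple of 16 (0x00-0x70 bytes).
        ! A MOVE_LASTCHUNK entry is 6 instructions (2 ldd + 4 st) = 24 bytes of
        ! code per 16 bytes copied, so the distance to back up from 80f is
        ! 1.5 * %g4.
        sethi           %hi(80f), %o5
        srl             %g4, 1, %o4             ! %o4 = %g4 / 2
        add             %g4, %o4, %o4           ! %o4 = 1.5 * %g4
        add             %o1, %g4, %o1           ! advance src: the entries index backwards
        sub             %o5, %o4, %o5
        jmpl            %o5 + %lo(80f), %g0     ! enter the table at 80f - 1.5 * %g4
         add            %o0, %g4, %o0           ! (delay slot) advance dst the same way
        ! Example: %g4 = 0x30 gives %o4 = 72 = 3 * 24, so the last three entries
        ! (offsets 0x20, 0x10, 0x00) run and copy the remaining 48 bytes.
        ! The aligned path after 82: subtracts %g4 directly, because a
        ! MOVE_LASTALIGNCHUNK entry is 4 instructions (16 bytes of code) per
        ! 16 bytes copied.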