
TOMOYO Linux Cross Reference
Linux/arch/arm64/lib/memset.S (linux-6.12-rc7)

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2013 ARM Ltd.
 * Copyright (C) 2013 Linaro.
 *
 * This code is based on the glibc cortex-strings work originally authored by
 * Linaro, which can be found at:
 *
 * http://bazaar.launchpad.net/~linaro-toolchain-dev/cortex-strings/trunk/
 * files/head:/src/aarch64/
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/cache.h>

/*
 * Fill in the buffer with character c (alignment handled by the hardware)
 *
 * Parameters:
 *      x0 - buf
 *      x1 - c
 *      x2 - n
 * Returns:
 *      x0 - buf
 */
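/* Equivalent C prototype: void *memset(void *buf, int c, size_t n). */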

dstin           .req    x0
val             .req    w1
count           .req    x2
tmp1            .req    x3
tmp1w           .req    w3
tmp2            .req    x4
tmp2w           .req    w4
zva_len_x       .req    x5
zva_len         .req    w5
zva_bits_x      .req    x6

A_l             .req    x7
A_lw            .req    w7
dst             .req    x8
tmp3w           .req    w9
tmp3            .req    x9

SYM_FUNC_START(__pi_memset)
        mov     dst, dstin      /* Preserve return value.  */
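        /*
         * Replicate the fill byte across all eight bytes of A_l,
         * e.g. 0x000000ab -> 0xabababababababab.
         */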
        and     A_lw, val, #255
        orr     A_lw, A_lw, A_lw, lsl #8
        orr     A_lw, A_lw, A_lw, lsl #16
        orr     A_l, A_l, A_l, lsl #32

        cmp     count, #15
        b.hi    .Lover16_proc
        /* All stores below may be unaligned. */
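        /*
         * count is at most 15: bits 3, 2, 1 and 0 of count select an 8-,
         * 4-, 2- and 1-byte store respectively.
         */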
        tbz     count, #3, 1f
        str     A_l, [dst], #8
1:
        tbz     count, #2, 2f
        str     A_lw, [dst], #4
2:
        tbz     count, #1, 3f
        strh    A_lw, [dst], #2
3:
        tbz     count, #0, 4f
        strb    A_lw, [dst]
4:
        ret

.Lover16_proc:
        /* Is the start address 16-byte aligned? */
        neg     tmp2, dst
        ands    tmp2, tmp2, #15
        b.eq    .Laligned
/*
* The count is at least 16, so we can use an unaligned stp to store the
* first 16 bytes, then advance dst to the next 16-byte boundary. This
* leaves the current address on an alignment boundary.
*/
        stp     A_l, A_l, [dst] /* Unaligned store. */
        /* Make dst 16-byte aligned. */
        sub     count, count, tmp2
        add     dst, dst, tmp2

.Laligned:
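        /* A fill value of zero may be able to use the 'DC ZVA' block-zeroing path. */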
        cbz     A_l, .Lzero_mem

.Ltail_maybe_long:
        cmp     count, #64
        b.ge    .Lnot_short
.Ltail63:
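        /*
         * Store 48, 32 or 16 bytes according to bits 5:4 of count by
         * branching into the middle of the stp sequence below.
         */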
        ands    tmp1, count, #0x30
        b.eq    3f
        cmp     tmp1w, #0x20
        b.eq    1f
        b.lt    2f
        stp     A_l, A_l, [dst], #16
1:
        stp     A_l, A_l, [dst], #16
2:
        stp     A_l, A_l, [dst], #16
/*
* The remaining length is less than 16; use an stp to write the last 16
* bytes. Some bytes may be written twice and the access may be unaligned.
*/
3:
        ands    count, count, #15
        cbz     count, 4f
        add     dst, dst, count
        stp     A_l, A_l, [dst, #-16]   /* Repeat some/all of last store. */
4:
        ret

        /*
        * Critical loop. Start at a new cache line boundary. Assuming
        * 64 bytes per line, this ensures the entire loop is in one line.
        */
        .p2align        L1_CACHE_SHIFT
.Lnot_short:
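        /*
         * Main loop: four stp instructions store 64 bytes per iteration.
         * dst is pre-biased by -16 so the writeback on the final stp keeps
         * it 16 bytes behind the next byte to be written.
         */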
        sub     dst, dst, #16   /* Pre-bias.  */
        sub     count, count, #64
1:
        stp     A_l, A_l, [dst, #16]
        stp     A_l, A_l, [dst, #32]
        stp     A_l, A_l, [dst, #48]
        stp     A_l, A_l, [dst, #64]!
        subs    count, count, #64
        b.ge    1b
        tst     count, #0x3f
        add     dst, dst, #16
        b.ne    .Ltail63
.Lexitfunc:
        ret

        /*
        * For zeroing memory, check to see if we can use the 'DC ZVA'
        * feature to zero entire 'cache' lines.
        */
.Lzero_mem:
        cmp     count, #63
        b.le    .Ltail63
        /*
        * For zeroing small amounts of memory, it's not worth setting up
        * the line-clear code.
        */
        cmp     count, #128
        b.lt    .Lnot_short     /* Fall through: count is at least 128 bytes. */

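        /*
         * DCZID_EL0: bit 4 (DZP) set means DC ZVA is prohibited; bits [3:0]
         * hold log2 of the block size in 4-byte words, so the block size in
         * bytes is 4 << DCZID_EL0[3:0].
         */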
        mrs     tmp1, dczid_el0
        tbnz    tmp1, #4, .Lnot_short
        mov     tmp3w, #4
        and     zva_len, tmp1w, #15     /* Safety: other bits reserved.  */
        lsl     zva_len, tmp3w, zva_len

        ands    tmp3w, zva_len, #63
        /*
        * Ensure that zva_len is not less than 64: it is not meaningful to
        * use ZVA if the block size is smaller than that.
        */
        b.ne    .Lnot_short
.Lzero_by_line:
        /*
        * Compute how far we need to go to become suitably aligned. We're
        * already at quad-word alignment.
        */
        cmp     count, zva_len_x
        b.lt    .Lnot_short             /* Not enough to reach alignment.  */
        sub     zva_bits_x, zva_len_x, #1
        neg     tmp2, dst
        ands    tmp2, tmp2, zva_bits_x
        b.eq    2f                      /* Already aligned. */
        /* Not aligned; check that there's enough to copy after alignment. */
        sub     tmp1, count, tmp2
        /*
        * Guarantee that the remaining length is ZVA-safe, so the loop at
        * 2f does not run past the end of the buffer.
        */
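        /*
         * ccmp: if tmp1 >= 64, compare tmp1 with the ZVA block size;
         * otherwise set NZCV to 0b1000 (N=1) so the b.lt below takes the
         * fallback path.
         */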
        cmp     tmp1, #64
        ccmp    tmp1, zva_len_x, #8, ge /* NZCV=0b1000 */
        b.lt    .Lnot_short
        /*
        * We know that there's at least 64 bytes to zero and that it's safe
        * to overrun by 64 bytes.
        */
        mov     count, tmp1
1:
        stp     A_l, A_l, [dst]
        stp     A_l, A_l, [dst, #16]
        stp     A_l, A_l, [dst, #32]
        subs    tmp2, tmp2, #64
        stp     A_l, A_l, [dst, #48]
        add     dst, dst, #64
        b.ge    1b
        /* We've overrun a bit, so adjust dst downwards. */
        add     dst, dst, tmp2
2:
        sub     count, count, zva_len_x
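        /*
         * Zero one ZVA block per iteration. count was pre-decremented above
         * so the loop exits before running past the requested length.
         */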
3:
        dc      zva, dst
        add     dst, dst, zva_len_x
        subs    count, count, zva_len_x
        b.ge    3b
        ands    count, count, zva_bits_x
        b.ne    .Ltail_maybe_long
        ret
SYM_FUNC_END(__pi_memset)

SYM_FUNC_ALIAS(__memset, __pi_memset)
EXPORT_SYMBOL(__memset)

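/*
 * 'memset' is only a weak alias so that an instrumented C implementation
 * (e.g. under KASAN) can take precedence; '__memset' always refers to this
 * uninstrumented routine.
 */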
SYM_FUNC_ALIAS_WEAK(memset, __pi_memset)
EXPORT_SYMBOL(memset)
