TOMOYO Linux Cross Reference
Linux/arch/arm/include/asm/cmpxchg.h

Version: linux-6.12-rc7

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_ARM_CMPXCHG_H
#define __ASM_ARM_CMPXCHG_H

#include <linux/irqflags.h>
#include <linux/prefetch.h>
#include <asm/barrier.h>
#include <linux/cmpxchg-emu.h>

#if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
/*
 * On the StrongARM, "swp" is terminally broken since it bypasses the
 * cache totally.  This means that the cache becomes inconsistent, and,
 * since we use normal loads/stores as well, this is really bad.
 * Typically, this causes oopsen in filp_close, but could have other,
 * more disastrous effects.  There are two work-arounds:
 *  1. Disable interrupts and emulate the atomic swap
 *  2. Clean the cache, perform atomic swap, flush the cache
 *
 * We choose (1) since it's the "easiest" to achieve here and is not
 * dependent on the processor type.
 *
 * NOTE that this solution won't work on an SMP system, so explicitly
 * forbid it here.
 */
#define swp_is_buggy
#endif

static inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
        extern void __bad_xchg(volatile void *, int);
        unsigned long ret;
#ifdef swp_is_buggy
        unsigned long flags;
#endif
#if __LINUX_ARM_ARCH__ >= 6
        unsigned int tmp;
#endif

        prefetchw((const void *)ptr);

        switch (size) {
#if __LINUX_ARM_ARCH__ >= 6
#ifndef CONFIG_CPU_V6 /* MIN ARCH >= V6K */
        case 1:
                asm volatile("@ __xchg1\n"
                "1:     ldrexb  %0, [%3]\n"
                "       strexb  %1, %2, [%3]\n"
                "       teq     %1, #0\n"
                "       bne     1b"
                        : "=&r" (ret), "=&r" (tmp)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
        case 2:
                asm volatile("@ __xchg2\n"
                "1:     ldrexh  %0, [%3]\n"
                "       strexh  %1, %2, [%3]\n"
                "       teq     %1, #0\n"
                "       bne     1b"
                        : "=&r" (ret), "=&r" (tmp)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
#endif
        case 4:
                asm volatile("@ __xchg4\n"
                "1:     ldrex   %0, [%3]\n"
                "       strex   %1, %2, [%3]\n"
                "       teq     %1, #0\n"
                "       bne     1b"
                        : "=&r" (ret), "=&r" (tmp)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
#elif defined(swp_is_buggy)
#ifdef CONFIG_SMP
#error SMP is not supported on this platform
#endif
        case 1:
                raw_local_irq_save(flags);
                ret = *(volatile unsigned char *)ptr;
                *(volatile unsigned char *)ptr = x;
                raw_local_irq_restore(flags);
                break;

        case 4:
                raw_local_irq_save(flags);
                ret = *(volatile unsigned long *)ptr;
                *(volatile unsigned long *)ptr = x;
                raw_local_irq_restore(flags);
                break;
#else
        case 1:
                asm volatile("@ __xchg1\n"
                "       swpb    %0, %1, [%2]"
                        : "=&r" (ret)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
        case 4:
                asm volatile("@ __xchg4\n"
                "       swp     %0, %1, [%2]"
                        : "=&r" (ret)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
#endif
        default:
                /* Cause a link-time error, the xchg() size is not supported */
                __bad_xchg(ptr, size), ret = 0;
                break;
        }

        return ret;
}

#define arch_xchg_relaxed(ptr, x) ({                                    \
        (__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr),      \
                                        sizeof(*(ptr)));                \
})
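
The ldrex/strex pairs above are what make __arch_xchg() atomic on ARMv6 and later: the store-exclusive reports failure in its status operand whenever another observer touched the location between the exclusive load and the exclusive store, and the loop retries until the swap lands. As a minimal sketch of how generic kernel code consumes this primitive through the xchg() wrapper (the spin flag and function names below are illustrative only, not part of this header):

        /* Illustrative only: a test-and-set spin flag built on xchg(). */
        static unsigned long example_flag;

        static void example_acquire(void)
        {
                /* Atomically store 1 and observe the previous value; a
                 * non-zero return means another CPU already holds the flag. */
                while (xchg(&example_flag, 1UL) != 0UL)
                        cpu_relax();
        }

        static void example_release(void)
        {
                xchg(&example_flag, 0UL);       /* swap 0 back in to release */
        }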

#include <asm-generic/cmpxchg-local.h>

#if __LINUX_ARM_ARCH__ < 6
/* min ARCH < ARMv6 */

#ifdef CONFIG_SMP
#error "SMP is not supported on this platform"
#endif

#define arch_xchg arch_xchg_relaxed

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define arch_cmpxchg_local(ptr, o, n) ({                                \
        (__typeof(*ptr))__generic_cmpxchg_local((ptr),                  \
                                                (unsigned long)(o),     \
                                                (unsigned long)(n),     \
                                                sizeof(*(ptr)));        \
})

#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

#include <asm-generic/cmpxchg.h>
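
On these pre-ARMv6 cores the __generic_* fallbacks disable interrupts around a plain load/compare/store, which is exactly the guarantee "atomic wrt current CPU" promises: safe against this CPU's own interrupt handlers, but not against other CPUs. A hedged sketch of the kind of caller this suffices for (names illustrative only, not part of this header):

        /* Illustrative only: cmpxchg_local() on CPU-private state. */
        static inline unsigned long example_local_bump(unsigned long *counter)
        {
                unsigned long old = *counter;   /* no cross-CPU writers assumed */

                /* Atomic against local interrupts only; returns the prior value. */
                return cmpxchg_local(counter, old, old + 1);
        }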

#else   /* min ARCH >= ARMv6 */

extern void __bad_cmpxchg(volatile void *ptr, int size);

/*
 * cmpxchg only supports 32-bit operands on ARMv6.
 */

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
                                      unsigned long new, int size)
{
        unsigned long oldval, res;

        prefetchw((const void *)ptr);

        switch (size) {
#ifdef CONFIG_CPU_V6    /* ARCH == ARMv6 */
        case 1:
                oldval = cmpxchg_emu_u8((volatile u8 *)ptr, old, new);
                break;
#else /* min ARCH > ARMv6 */
        case 1:
                do {
                        asm volatile("@ __cmpxchg1\n"
                        "       ldrexb  %1, [%2]\n"
                        "       mov     %0, #0\n"
                        "       teq     %1, %3\n"
                        "       strexbeq %0, %4, [%2]\n"
                                : "=&r" (res), "=&r" (oldval)
                                : "r" (ptr), "Ir" (old), "r" (new)
                                : "memory", "cc");
                } while (res);
                break;
        case 2:
                do {
                        asm volatile("@ __cmpxchg2\n"
                        "       ldrexh  %1, [%2]\n"
                        "       mov     %0, #0\n"
                        "       teq     %1, %3\n"
                        "       strexheq %0, %4, [%2]\n"
                                : "=&r" (res), "=&r" (oldval)
                                : "r" (ptr), "Ir" (old), "r" (new)
                                : "memory", "cc");
                } while (res);
                break;
#endif
        case 4:
                do {
                        asm volatile("@ __cmpxchg4\n"
                        "       ldrex   %1, [%2]\n"
                        "       mov     %0, #0\n"
                        "       teq     %1, %3\n"
                        "       strexeq %0, %4, [%2]\n"
                                : "=&r" (res), "=&r" (oldval)
                                : "r" (ptr), "Ir" (old), "r" (new)
                                : "memory", "cc");
                } while (res);
                break;
        default:
                __bad_cmpxchg(ptr, size);
                oldval = 0;
        }

        return oldval;
}

#define arch_cmpxchg_relaxed(ptr,o,n) ({                                \
        (__typeof__(*(ptr)))__cmpxchg((ptr),                            \
                                      (unsigned long)(o),               \
                                      (unsigned long)(n),               \
                                      sizeof(*(ptr)));                  \
})
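
Because __cmpxchg() hands back the value it actually found at *ptr, callers can build the canonical compare-and-swap retry loop on top of it. A minimal sketch using the kernel's cmpxchg() wrapper (the function below is illustrative only, not part of this header):

        /* Illustrative only: lock-free add built on a cmpxchg() retry loop. */
        static inline void example_lockfree_add(unsigned long *p, unsigned long inc)
        {
                unsigned long old;

                do {
                        old = READ_ONCE(*p);
                        /* Store old + inc only if *p still equals old; a
                         * mismatched return value means another CPU got in
                         * between, so recompute and retry. */
                } while (cmpxchg(p, old, old + inc) != old);
        }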

static inline unsigned long __cmpxchg_local(volatile void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        unsigned long ret;

        switch (size) {
#ifdef CONFIG_CPU_V6    /* min ARCH == ARMv6 */
        case 1:
        case 2:
                ret = __generic_cmpxchg_local(ptr, old, new, size);
                break;
#endif
        default:
                ret = __cmpxchg(ptr, old, new, size);
        }

        return ret;
}

#define arch_cmpxchg_local(ptr, o, n) ({                                \
        (__typeof(*ptr))__cmpxchg_local((ptr),                          \
                                        (unsigned long)(o),             \
                                        (unsigned long)(n),             \
                                        sizeof(*(ptr)));                \
})

static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
                                             unsigned long long old,
                                             unsigned long long new)
{
        unsigned long long oldval;
        unsigned long res;

        prefetchw(ptr);

        __asm__ __volatile__(
"1:     ldrexd          %1, %H1, [%3]\n"
"       teq             %1, %4\n"
"       teqeq           %H1, %H4\n"
"       bne             2f\n"
"       strexd          %0, %5, %H5, [%3]\n"
"       teq             %0, #0\n"
"       bne             1b\n"
"2:"
        : "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
        : "r" (ptr), "r" (old), "r" (new)
        : "cc");

        return oldval;
}

#define arch_cmpxchg64_relaxed(ptr, o, n) ({                            \
        (__typeof__(*(ptr)))__cmpxchg64((ptr),                          \
                                        (unsigned long long)(o),        \
                                        (unsigned long long)(n));       \
})

#define arch_cmpxchg64_local(ptr, o, n) arch_cmpxchg64_relaxed((ptr), (o), (n))
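
__cmpxchg64() relies on ldrexd/strexd, which transfer a 64-bit quantity through an even/odd register pair; the %H operand modifier in the asm template names the high register of that pair. A sketch of consuming it through the kernel's cmpxchg64() wrapper (illustrative only, assuming the usual kernel types are in scope):

        /* Illustrative only: publish a 64-bit value exactly once. */
        static inline bool example_publish_once(u64 *slot, u64 val)
        {
                /* The store happens only if the slot still holds 0; the
                 * returned prior value tells us whether we won the race. */
                return cmpxchg64(slot, 0, val) == 0;
        }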

#endif  /* __LINUX_ARM_ARCH__ >= 6 */

#endif /* __ASM_ARM_CMPXCHG_H */
