
TOMOYO Linux Cross Reference
Linux/include/linux/atomic/atomic-arch-fallback.h

Diff markup

Differences between /include/linux/atomic/atomic-arch-fallback.h (linux-6.12-rc7) and /include/linux/atomic/atomic-arch-fallback.h (linux-4.20.17). The file does not exist in linux-4.20.17, so the entire linux-6.12-rc7 file appears as added.

// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

#if defined(arch_xchg)
#define raw_xchg arch_xchg
#elif defined(arch_xchg_relaxed)
#define raw_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#else
extern void raw_xchg_not_implemented(void);
#define raw_xchg(...) raw_xchg_not_implemented()
#endif

#if defined(arch_xchg_acquire)
#define raw_xchg_acquire arch_xchg_acquire
#elif defined(arch_xchg_relaxed)
#define raw_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#elif defined(arch_xchg)
#define raw_xchg_acquire arch_xchg
#else
extern void raw_xchg_acquire_not_implemented(void);
#define raw_xchg_acquire(...) raw_xchg_acquire_not_implemented()
#endif

#if defined(arch_xchg_release)
#define raw_xchg_release arch_xchg_release
#elif defined(arch_xchg_relaxed)
#define raw_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#elif defined(arch_xchg)
#define raw_xchg_release arch_xchg
#else
extern void raw_xchg_release_not_implemented(void);
#define raw_xchg_release(...) raw_xchg_release_not_implemented()
#endif

#if defined(arch_xchg_relaxed)
#define raw_xchg_relaxed arch_xchg_relaxed
#elif defined(arch_xchg)
#define raw_xchg_relaxed arch_xchg
#else
extern void raw_xchg_relaxed_not_implemented(void);
#define raw_xchg_relaxed(...) raw_xchg_relaxed_not_implemented()
#endif
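
/*
 * Illustrative note (not part of the generated header): the cascades above
 * pick the strongest arch-provided form first and otherwise build the
 * requested ordering from arch_xchg_relaxed(). For an architecture that only
 * provides arch_xchg_relaxed(), the effect is roughly:
 *
 *	raw_xchg(...)         -> __atomic_op_fence(arch_xchg, ...)
 *	raw_xchg_acquire(...) -> __atomic_op_acquire(arch_xchg, ...)
 *	raw_xchg_release(...) -> __atomic_op_release(arch_xchg, ...)
 *	raw_xchg_relaxed      -> arch_xchg_relaxed
 *
 * where the __atomic_op_*() helpers (defined in <linux/atomic.h>) invoke the
 * _relaxed op and add the appropriate fences around it.
 */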

#if defined(arch_cmpxchg)
#define raw_cmpxchg arch_cmpxchg
#elif defined(arch_cmpxchg_relaxed)
#define raw_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#else
extern void raw_cmpxchg_not_implemented(void);
#define raw_cmpxchg(...) raw_cmpxchg_not_implemented()
#endif

#if defined(arch_cmpxchg_acquire)
#define raw_cmpxchg_acquire arch_cmpxchg_acquire
#elif defined(arch_cmpxchg_relaxed)
#define raw_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#elif defined(arch_cmpxchg)
#define raw_cmpxchg_acquire arch_cmpxchg
#else
extern void raw_cmpxchg_acquire_not_implemented(void);
#define raw_cmpxchg_acquire(...) raw_cmpxchg_acquire_not_implemented()
#endif

#if defined(arch_cmpxchg_release)
#define raw_cmpxchg_release arch_cmpxchg_release
#elif defined(arch_cmpxchg_relaxed)
#define raw_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#elif defined(arch_cmpxchg)
#define raw_cmpxchg_release arch_cmpxchg
#else
extern void raw_cmpxchg_release_not_implemented(void);
#define raw_cmpxchg_release(...) raw_cmpxchg_release_not_implemented()
#endif

#if defined(arch_cmpxchg_relaxed)
#define raw_cmpxchg_relaxed arch_cmpxchg_relaxed
#elif defined(arch_cmpxchg)
#define raw_cmpxchg_relaxed arch_cmpxchg
#else
extern void raw_cmpxchg_relaxed_not_implemented(void);
#define raw_cmpxchg_relaxed(...) raw_cmpxchg_relaxed_not_implemented()
#endif

#if defined(arch_cmpxchg64)
#define raw_cmpxchg64 arch_cmpxchg64
#elif defined(arch_cmpxchg64_relaxed)
#define raw_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#else
extern void raw_cmpxchg64_not_implemented(void);
#define raw_cmpxchg64(...) raw_cmpxchg64_not_implemented()
#endif

#if defined(arch_cmpxchg64_acquire)
#define raw_cmpxchg64_acquire arch_cmpxchg64_acquire
#elif defined(arch_cmpxchg64_relaxed)
#define raw_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#elif defined(arch_cmpxchg64)
#define raw_cmpxchg64_acquire arch_cmpxchg64
#else
extern void raw_cmpxchg64_acquire_not_implemented(void);
#define raw_cmpxchg64_acquire(...) raw_cmpxchg64_acquire_not_implemented()
#endif

#if defined(arch_cmpxchg64_release)
#define raw_cmpxchg64_release arch_cmpxchg64_release
#elif defined(arch_cmpxchg64_relaxed)
#define raw_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#elif defined(arch_cmpxchg64)
#define raw_cmpxchg64_release arch_cmpxchg64
#else
extern void raw_cmpxchg64_release_not_implemented(void);
#define raw_cmpxchg64_release(...) raw_cmpxchg64_release_not_implemented()
#endif

#if defined(arch_cmpxchg64_relaxed)
#define raw_cmpxchg64_relaxed arch_cmpxchg64_relaxed
#elif defined(arch_cmpxchg64)
#define raw_cmpxchg64_relaxed arch_cmpxchg64
#else
extern void raw_cmpxchg64_relaxed_not_implemented(void);
#define raw_cmpxchg64_relaxed(...) raw_cmpxchg64_relaxed_not_implemented()
#endif

#if defined(arch_cmpxchg128)
#define raw_cmpxchg128 arch_cmpxchg128
#elif defined(arch_cmpxchg128_relaxed)
#define raw_cmpxchg128(...) \
	__atomic_op_fence(arch_cmpxchg128, __VA_ARGS__)
#else
extern void raw_cmpxchg128_not_implemented(void);
#define raw_cmpxchg128(...) raw_cmpxchg128_not_implemented()
#endif

#if defined(arch_cmpxchg128_acquire)
#define raw_cmpxchg128_acquire arch_cmpxchg128_acquire
#elif defined(arch_cmpxchg128_relaxed)
#define raw_cmpxchg128_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg128, __VA_ARGS__)
#elif defined(arch_cmpxchg128)
#define raw_cmpxchg128_acquire arch_cmpxchg128
#else
extern void raw_cmpxchg128_acquire_not_implemented(void);
#define raw_cmpxchg128_acquire(...) raw_cmpxchg128_acquire_not_implemented()
#endif

#if defined(arch_cmpxchg128_release)
#define raw_cmpxchg128_release arch_cmpxchg128_release
#elif defined(arch_cmpxchg128_relaxed)
#define raw_cmpxchg128_release(...) \
	__atomic_op_release(arch_cmpxchg128, __VA_ARGS__)
#elif defined(arch_cmpxchg128)
#define raw_cmpxchg128_release arch_cmpxchg128
#else
extern void raw_cmpxchg128_release_not_implemented(void);
#define raw_cmpxchg128_release(...) raw_cmpxchg128_release_not_implemented()
#endif

#if defined(arch_cmpxchg128_relaxed)
#define raw_cmpxchg128_relaxed arch_cmpxchg128_relaxed
#elif defined(arch_cmpxchg128)
#define raw_cmpxchg128_relaxed arch_cmpxchg128
#else
extern void raw_cmpxchg128_relaxed_not_implemented(void);
#define raw_cmpxchg128_relaxed(...) raw_cmpxchg128_relaxed_not_implemented()
#endif

#if defined(arch_try_cmpxchg)
#define raw_try_cmpxchg arch_try_cmpxchg
#elif defined(arch_try_cmpxchg_relaxed)
#define raw_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#else
#define raw_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif
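
/*
 * Illustrative usage sketch (not part of the generated header): the fallback
 * above emulates try_cmpxchg() using a plain cmpxchg() and, on failure,
 * writes the observed value back through @_oldp so a caller can retry
 * without re-reading. A hypothetical caller could look like:
 *
 *	static inline bool hypothetical_set_flag_once(unsigned long *p)
 *	{
 *		unsigned long old = *p;
 *
 *		do {
 *			if (old & 1UL)
 *				return false;
 *		} while (!raw_try_cmpxchg(p, &old, old | 1UL));
 *
 *		return true;
 *	}
 *
 * On failure, 'old' already holds the freshly observed value for the retry.
 */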

#if defined(arch_try_cmpxchg_acquire)
#define raw_try_cmpxchg_acquire arch_try_cmpxchg_acquire
#elif defined(arch_try_cmpxchg_relaxed)
#define raw_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#elif defined(arch_try_cmpxchg)
#define raw_try_cmpxchg_acquire arch_try_cmpxchg
#else
#define raw_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg_release)
#define raw_try_cmpxchg_release arch_try_cmpxchg_release
#elif defined(arch_try_cmpxchg_relaxed)
#define raw_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#elif defined(arch_try_cmpxchg)
#define raw_try_cmpxchg_release arch_try_cmpxchg
#else
#define raw_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg_relaxed)
#define raw_try_cmpxchg_relaxed arch_try_cmpxchg_relaxed
#elif defined(arch_try_cmpxchg)
#define raw_try_cmpxchg_relaxed arch_try_cmpxchg
#else
#define raw_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg64)
#define raw_try_cmpxchg64 arch_try_cmpxchg64
#elif defined(arch_try_cmpxchg64_relaxed)
#define raw_try_cmpxchg64(...) \
	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
#else
#define raw_try_cmpxchg64(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg64((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg64_acquire)
#define raw_try_cmpxchg64_acquire arch_try_cmpxchg64_acquire
#elif defined(arch_try_cmpxchg64_relaxed)
#define raw_try_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
#elif defined(arch_try_cmpxchg64)
#define raw_try_cmpxchg64_acquire arch_try_cmpxchg64
#else
#define raw_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg64_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg64_release)
#define raw_try_cmpxchg64_release arch_try_cmpxchg64_release
#elif defined(arch_try_cmpxchg64_relaxed)
#define raw_try_cmpxchg64_release(...) \
	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
#elif defined(arch_try_cmpxchg64)
#define raw_try_cmpxchg64_release arch_try_cmpxchg64
#else
#define raw_try_cmpxchg64_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg64_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg64_relaxed)
#define raw_try_cmpxchg64_relaxed arch_try_cmpxchg64_relaxed
#elif defined(arch_try_cmpxchg64)
#define raw_try_cmpxchg64_relaxed arch_try_cmpxchg64
#else
#define raw_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg128)
#define raw_try_cmpxchg128 arch_try_cmpxchg128
#elif defined(arch_try_cmpxchg128_relaxed)
#define raw_try_cmpxchg128(...) \
	__atomic_op_fence(arch_try_cmpxchg128, __VA_ARGS__)
#else
#define raw_try_cmpxchg128(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg128((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg128_acquire)
#define raw_try_cmpxchg128_acquire arch_try_cmpxchg128_acquire
#elif defined(arch_try_cmpxchg128_relaxed)
#define raw_try_cmpxchg128_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg128, __VA_ARGS__)
#elif defined(arch_try_cmpxchg128)
#define raw_try_cmpxchg128_acquire arch_try_cmpxchg128
#else
#define raw_try_cmpxchg128_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg128_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg128_release)
#define raw_try_cmpxchg128_release arch_try_cmpxchg128_release
#elif defined(arch_try_cmpxchg128_relaxed)
#define raw_try_cmpxchg128_release(...) \
	__atomic_op_release(arch_try_cmpxchg128, __VA_ARGS__)
#elif defined(arch_try_cmpxchg128)
#define raw_try_cmpxchg128_release arch_try_cmpxchg128
#else
#define raw_try_cmpxchg128_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg128_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#if defined(arch_try_cmpxchg128_relaxed)
#define raw_try_cmpxchg128_relaxed arch_try_cmpxchg128_relaxed
#elif defined(arch_try_cmpxchg128)
#define raw_try_cmpxchg128_relaxed arch_try_cmpxchg128
#else
#define raw_try_cmpxchg128_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg128_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#define raw_cmpxchg_local arch_cmpxchg_local

#ifdef arch_try_cmpxchg_local
#define raw_try_cmpxchg_local arch_try_cmpxchg_local
#else
#define raw_try_cmpxchg_local(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg_local((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#define raw_cmpxchg64_local arch_cmpxchg64_local

#ifdef arch_try_cmpxchg64_local
#define raw_try_cmpxchg64_local arch_try_cmpxchg64_local
#else
#define raw_try_cmpxchg64_local(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg64_local((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#define raw_cmpxchg128_local arch_cmpxchg128_local

#ifdef arch_try_cmpxchg128_local
#define raw_try_cmpxchg128_local arch_try_cmpxchg128_local
#else
#define raw_try_cmpxchg128_local(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_cmpxchg128_local((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#define raw_sync_cmpxchg arch_sync_cmpxchg

#ifdef arch_sync_try_cmpxchg
#define raw_sync_try_cmpxchg arch_sync_try_cmpxchg
#else
#define raw_sync_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = raw_sync_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif
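
/*
 * Illustrative note (not part of the generated header): the raw_atomic_*()
 * ops defined below are the non-instrumented layer referred to by the
 * "Safe to use in noinstr code" comments. The ordinary atomic_*() wrappers
 * (generated into atomic-instrumented.h) add KASAN/KCSAN instrumentation on
 * top, roughly like:
 *
 *	static __always_inline int
 *	atomic_read(const atomic_t *v)
 *	{
 *		instrument_atomic_read(v, sizeof(*v));
 *		return raw_atomic_read(v);
 *	}
 *
 * which is why the instrumented variants are preferred outside noinstr code.
 */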

/**
 * raw_atomic_read() - atomic load with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically loads the value of @v with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_read() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline int
raw_atomic_read(const atomic_t *v)
{
	return arch_atomic_read(v);
}

/**
 * raw_atomic_read_acquire() - atomic load with acquire ordering
 * @v: pointer to atomic_t
 *
 * Atomically loads the value of @v with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_read_acquire() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline int
raw_atomic_read_acquire(const atomic_t *v)
{
#if defined(arch_atomic_read_acquire)
	return arch_atomic_read_acquire(v);
#else
	int ret;

	if (__native_word(atomic_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = raw_atomic_read(v);
		__atomic_acquire_fence();
	}

	return ret;
#endif
}

/**
 * raw_atomic_set() - atomic set with relaxed ordering
 * @v: pointer to atomic_t
 * @i: int value to assign
 *
 * Atomically sets @v to @i with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_set() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_set(atomic_t *v, int i)
{
	arch_atomic_set(v, i);
}

/**
 * raw_atomic_set_release() - atomic set with release ordering
 * @v: pointer to atomic_t
 * @i: int value to assign
 *
 * Atomically sets @v to @i with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_set_release() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_set_release(atomic_t *v, int i)
{
#if defined(arch_atomic_set_release)
	arch_atomic_set_release(v, i);
#else
	if (__native_word(atomic_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		raw_atomic_set(v, i);
	}
#endif
}
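
/*
 * Illustrative pairing sketch (not part of the generated header): a release
 * store via raw_atomic_set_release() is meant to pair with an acquire load
 * via raw_atomic_read_acquire(), as in this hypothetical publish/consume
 * pattern:
 *
 *	static int hypothetical_data;
 *	static atomic_t hypothetical_ready = ATOMIC_INIT(0);
 *
 *	static void hypothetical_publish(int val)
 *	{
 *		hypothetical_data = val;
 *		raw_atomic_set_release(&hypothetical_ready, 1);
 *	}
 *
 *	static int hypothetical_consume(void)
 *	{
 *		if (raw_atomic_read_acquire(&hypothetical_ready))
 *			return hypothetical_data;
 *		return -1;
 *	}
 *
 * The release store orders the data write before the flag update; the acquire
 * load orders the flag check before the data read.
 */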

/**
 * raw_atomic_add() - atomic add with relaxed ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_add() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_add(int i, atomic_t *v)
{
	arch_atomic_add(i, v);
}

/**
 * raw_atomic_add_return() - atomic add with full ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_add_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_add_return(int i, atomic_t *v)
{
#if defined(arch_atomic_add_return)
	return arch_atomic_add_return(i, v);
#elif defined(arch_atomic_add_return_relaxed)
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
#else
#error "Unable to define raw_atomic_add_return"
#endif
}
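
/*
 * Illustrative usage sketch (not part of the generated header): the
 * fully-ordered *_return variants are typically used when the returned value
 * drives a decision that must be ordered against surrounding accesses, e.g.
 * a hypothetical bounded counter:
 *
 *	static inline bool hypothetical_inc_below(atomic_t *counter, int limit)
 *	{
 *		return raw_atomic_add_return(1, counter) <= limit;
 *	}
 *
 * When only arch_atomic_add_return_relaxed() exists, the fallback above
 * brackets it with __atomic_pre_full_fence()/__atomic_post_full_fence() to
 * provide the full ordering.
 */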

/**
 * raw_atomic_add_return_acquire() - atomic add with acquire ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_add_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_add_return_acquire(int i, atomic_t *v)
{
#if defined(arch_atomic_add_return_acquire)
	return arch_atomic_add_return_acquire(i, v);
#elif defined(arch_atomic_add_return_relaxed)
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
#elif defined(arch_atomic_add_return)
	return arch_atomic_add_return(i, v);
#else
#error "Unable to define raw_atomic_add_return_acquire"
#endif
}

/**
 * raw_atomic_add_return_release() - atomic add with release ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_add_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_add_return_release(int i, atomic_t *v)
{
#if defined(arch_atomic_add_return_release)
	return arch_atomic_add_return_release(i, v);
#elif defined(arch_atomic_add_return_relaxed)
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
#elif defined(arch_atomic_add_return)
	return arch_atomic_add_return(i, v);
#else
#error "Unable to define raw_atomic_add_return_release"
#endif
}

/**
 * raw_atomic_add_return_relaxed() - atomic add with relaxed ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_add_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_add_return_relaxed(int i, atomic_t *v)
{
#if defined(arch_atomic_add_return_relaxed)
	return arch_atomic_add_return_relaxed(i, v);
#elif defined(arch_atomic_add_return)
	return arch_atomic_add_return(i, v);
#else
#error "Unable to define raw_atomic_add_return_relaxed"
#endif
}

/**
 * raw_atomic_fetch_add() - atomic add with full ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_add() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_add(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_add)
	return arch_atomic_fetch_add(i, v);
#elif defined(arch_atomic_fetch_add_relaxed)
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
#else
#error "Unable to define raw_atomic_fetch_add"
#endif
}

/**
 * raw_atomic_fetch_add_acquire() - atomic add with acquire ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_add_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_add_acquire(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_add_acquire)
	return arch_atomic_fetch_add_acquire(i, v);
#elif defined(arch_atomic_fetch_add_relaxed)
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
#elif defined(arch_atomic_fetch_add)
	return arch_atomic_fetch_add(i, v);
#else
#error "Unable to define raw_atomic_fetch_add_acquire"
#endif
}

/**
 * raw_atomic_fetch_add_release() - atomic add with release ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_add_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_add_release(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_add_release)
	return arch_atomic_fetch_add_release(i, v);
#elif defined(arch_atomic_fetch_add_relaxed)
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
#elif defined(arch_atomic_fetch_add)
	return arch_atomic_fetch_add(i, v);
#else
#error "Unable to define raw_atomic_fetch_add_release"
#endif
}

/**
 * raw_atomic_fetch_add_relaxed() - atomic add with relaxed ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_add_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_add_relaxed(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_add_relaxed)
	return arch_atomic_fetch_add_relaxed(i, v);
#elif defined(arch_atomic_fetch_add)
	return arch_atomic_fetch_add(i, v);
#else
#error "Unable to define raw_atomic_fetch_add_relaxed"
#endif
}

/**
 * raw_atomic_sub() - atomic subtract with relaxed ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_sub() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_sub(int i, atomic_t *v)
{
	arch_atomic_sub(i, v);
}

/**
 * raw_atomic_sub_return() - atomic subtract with full ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_sub_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_sub_return(int i, atomic_t *v)
{
#if defined(arch_atomic_sub_return)
	return arch_atomic_sub_return(i, v);
#elif defined(arch_atomic_sub_return_relaxed)
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
#else
#error "Unable to define raw_atomic_sub_return"
#endif
}

/**
 * raw_atomic_sub_return_acquire() - atomic subtract with acquire ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_sub_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_sub_return_acquire(int i, atomic_t *v)
{
#if defined(arch_atomic_sub_return_acquire)
	return arch_atomic_sub_return_acquire(i, v);
#elif defined(arch_atomic_sub_return_relaxed)
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
#elif defined(arch_atomic_sub_return)
	return arch_atomic_sub_return(i, v);
#else
#error "Unable to define raw_atomic_sub_return_acquire"
#endif
}

/**
 * raw_atomic_sub_return_release() - atomic subtract with release ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_sub_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_sub_return_release(int i, atomic_t *v)
{
#if defined(arch_atomic_sub_return_release)
	return arch_atomic_sub_return_release(i, v);
#elif defined(arch_atomic_sub_return_relaxed)
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
#elif defined(arch_atomic_sub_return)
	return arch_atomic_sub_return(i, v);
#else
#error "Unable to define raw_atomic_sub_return_release"
#endif
}

/**
 * raw_atomic_sub_return_relaxed() - atomic subtract with relaxed ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_sub_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
raw_atomic_sub_return_relaxed(int i, atomic_t *v)
{
#if defined(arch_atomic_sub_return_relaxed)
	return arch_atomic_sub_return_relaxed(i, v);
#elif defined(arch_atomic_sub_return)
	return arch_atomic_sub_return(i, v);
#else
#error "Unable to define raw_atomic_sub_return_relaxed"
#endif
}

/**
 * raw_atomic_fetch_sub() - atomic subtract with full ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_sub() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_sub(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_sub)
	return arch_atomic_fetch_sub(i, v);
#elif defined(arch_atomic_fetch_sub_relaxed)
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
#else
#error "Unable to define raw_atomic_fetch_sub"
#endif
}

/**
 * raw_atomic_fetch_sub_acquire() - atomic subtract with acquire ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_sub_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_sub_acquire)
	return arch_atomic_fetch_sub_acquire(i, v);
#elif defined(arch_atomic_fetch_sub_relaxed)
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
#elif defined(arch_atomic_fetch_sub)
	return arch_atomic_fetch_sub(i, v);
#else
#error "Unable to define raw_atomic_fetch_sub_acquire"
#endif
}

/**
 * raw_atomic_fetch_sub_release() - atomic subtract with release ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_sub_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_sub_release(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_sub_release)
	return arch_atomic_fetch_sub_release(i, v);
#elif defined(arch_atomic_fetch_sub_relaxed)
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
#elif defined(arch_atomic_fetch_sub)
	return arch_atomic_fetch_sub(i, v);
#else
#error "Unable to define raw_atomic_fetch_sub_release"
#endif
}

/**
 * raw_atomic_fetch_sub_relaxed() - atomic subtract with relaxed ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_fetch_sub_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
#if defined(arch_atomic_fetch_sub_relaxed)
	return arch_atomic_fetch_sub_relaxed(i, v);
#elif defined(arch_atomic_fetch_sub)
	return arch_atomic_fetch_sub(i, v);
#else
#error "Unable to define raw_atomic_fetch_sub_relaxed"
#endif
}
977                                                   
978 /**
979  * raw_atomic_inc() - atomic increment with relaxed ordering
980  * @v: pointer to atomic_t
981  *
982  * Atomically updates @v to (@v + 1) with relaxed ordering.
983  *
984  * Safe to use in noinstr code; prefer atomic_inc() elsewhere.
985  *
986  * Return: Nothing.
987  */
988 static __always_inline void
989 raw_atomic_inc(atomic_t *v)
990 {
991 #if defined(arch_atomic_inc)
992         arch_atomic_inc(v);
993 #else
994         raw_atomic_add(1, v);
995 #endif
996 }
997 
998 /**
999  * raw_atomic_inc_return() - atomic increment with full ordering
1000  * @v: pointer to atomic_t
1001  *
1002  * Atomically updates @v to (@v + 1) with full ordering.
1003  *
1004  * Safe to use in noinstr code; prefer atomic_inc_return() elsewhere.
1005  *
1006  * Return: The updated value of @v.
1007  */
1008 static __always_inline int
1009 raw_atomic_inc_return(atomic_t *v)
1010 {
1011 #if defined(arch_atomic_inc_return)
1012         return arch_atomic_inc_return(v);
1013 #elif defined(arch_atomic_inc_return_relaxed)
1014         int ret;
1015         __atomic_pre_full_fence();
1016         ret = arch_atomic_inc_return_relaxed(v);
1017         __atomic_post_full_fence();
1018         return ret;
1019 #else
1020         return raw_atomic_add_return(1, v);
1021 #endif
1022 }
1023                                                  
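/*
 * Illustrative sketch, not part of the generated header: because
 * raw_atomic_inc_return() returns the updated value with full ordering,
 * it can hand out unique, monotonically increasing identifiers; the
 * names below are hypothetical:
 *
 *         static atomic_t next_id = ATOMIC_INIT(0);
 *
 *         static int alloc_id(void)
 *         {
 *                 return raw_atomic_inc_return(&next_id);   // 1, 2, 3, ...
 *         }
 */
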
1024 /**
1025  * raw_atomic_inc_return_acquire() - atomic increment with acquire ordering
1026  * @v: pointer to atomic_t
1027  *
1028  * Atomically updates @v to (@v + 1) with acquire ordering.
1029  *
1030  * Safe to use in noinstr code; prefer atomic_inc_return_acquire() elsewhere.
1031  *
1032  * Return: The updated value of @v.
1033  */
1034 static __always_inline int
1035 raw_atomic_inc_return_acquire(atomic_t *v)
1036 {
1037 #if defined(arch_atomic_inc_return_acquire)
1038         return arch_atomic_inc_return_acquire(v);
1039 #elif defined(arch_atomic_inc_return_relaxed)
1040         int ret = arch_atomic_inc_return_relaxed(v);
1041         __atomic_acquire_fence();
1042         return ret;
1043 #elif defined(arch_atomic_inc_return)
1044         return arch_atomic_inc_return(v);
1045 #else
1046         return raw_atomic_add_return_acquire(1, v);
1047 #endif
1048 }
1049 
1050 /**
1051  * raw_atomic_inc_return_release() - atomic increment with release ordering
1052  * @v: pointer to atomic_t
1053  *
1054  * Atomically updates @v to (@v + 1) with release ordering.
1055  *
1056  * Safe to use in noinstr code; prefer atomic_inc_return_release() elsewhere.
1057  *
1058  * Return: The updated value of @v.
1059  */
1060 static __always_inline int
1061 raw_atomic_inc_return_release(atomic_t *v)
1062 {
1063 #if defined(arch_atomic_inc_return_release)
1064         return arch_atomic_inc_return_release(v);
1065 #elif defined(arch_atomic_inc_return_relaxed)
1066         __atomic_release_fence();
1067         return arch_atomic_inc_return_relaxed(v);
1068 #elif defined(arch_atomic_inc_return)
1069         return arch_atomic_inc_return(v);
1070 #else
1071         return raw_atomic_add_return_release(1, v);
1072 #endif
1073 }
1074                                                  
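/*
 * Illustrative sketch, not part of the generated header: the variants
 * above show the general fallback scheme. Given only a hypothetical
 * arch_atomic_foo_relaxed(), the stronger orderings are derived by
 * placing fences around the relaxed primitive:
 *
 *         // acquire: relaxed op first, then an acquire fence
 *         ret = arch_atomic_foo_relaxed(v);
 *         __atomic_acquire_fence();
 *
 *         // release: a release fence first, then the relaxed op
 *         __atomic_release_fence();
 *         ret = arch_atomic_foo_relaxed(v);
 *
 *         // full: the relaxed op bracketed by full fences
 *         __atomic_pre_full_fence();
 *         ret = arch_atomic_foo_relaxed(v);
 *         __atomic_post_full_fence();
 */
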
1075 /**
1076  * raw_atomic_inc_return_relaxed() - atomic increment with relaxed ordering
1077  * @v: pointer to atomic_t
1078  *
1079  * Atomically updates @v to (@v + 1) with relaxed ordering.
1080  *
1081  * Safe to use in noinstr code; prefer atomic_inc_return_relaxed() elsewhere.
1082  *
1083  * Return: The updated value of @v.
1084  */
1085 static __always_inline int
1086 raw_atomic_inc_return_relaxed(atomic_t *v)
1087 {
1088 #if defined(arch_atomic_inc_return_relaxed)
1089         return arch_atomic_inc_return_relaxed(v);
1090 #elif defined(arch_atomic_inc_return)
1091         return arch_atomic_inc_return(v);
1092 #else
1093         return raw_atomic_add_return_relaxed(1, v);
1094 #endif
1095 }
1096 
1097 /**
1098  * raw_atomic_fetch_inc() - atomic increment with full ordering
1099  * @v: pointer to atomic_t
1100  *
1101  * Atomically updates @v to (@v + 1) with full ordering.
1102  *
1103  * Safe to use in noinstr code; prefer atomic_fetch_inc() elsewhere.
1104  *
1105  * Return: The original value of @v.
1106  */
1107 static __always_inline int
1108 raw_atomic_fetch_inc(atomic_t *v)
1109 {
1110 #if defined(arch_atomic_fetch_inc)
1111         return arch_atomic_fetch_inc(v);
1112 #elif defined(arch_atomic_fetch_inc_relaxed)
1113         int ret;
1114         __atomic_pre_full_fence();
1115         ret = arch_atomic_fetch_inc_relaxed(v);
1116         __atomic_post_full_fence();
1117         return ret;
1118 #else
1119         return raw_atomic_fetch_add(1, v);
1120 #endif
1121 }
1122                                                  
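/*
 * Illustrative sketch, not part of the generated header: unlike
 * raw_atomic_inc_return(), raw_atomic_fetch_inc() returns the value
 * *before* the increment, which is convenient for claiming the next
 * slot in a hypothetical fixed-size log:
 *
 *         static atomic_t next_slot = ATOMIC_INIT(0);
 *
 *         static int claim_slot(void)
 *         {
 *                 return raw_atomic_fetch_inc(&next_slot);   // 0, 1, 2, ...
 *         }
 */
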
1123 /**
1124  * raw_atomic_fetch_inc_acquire() - atomic increment with acquire ordering
1125  * @v: pointer to atomic_t
1126  *
1127  * Atomically updates @v to (@v + 1) with acquire ordering.
1128  *
1129  * Safe to use in noinstr code; prefer atomic_fetch_inc_acquire() elsewhere.
1130  *
1131  * Return: The original value of @v.
1132  */
1133 static __always_inline int
1134 raw_atomic_fetch_inc_acquire(atomic_t *v)
1135 {
1136 #if defined(arch_atomic_fetch_inc_acquire)
1137         return arch_atomic_fetch_inc_acquire(v);
1138 #elif defined(arch_atomic_fetch_inc_relaxed)
1139         int ret = arch_atomic_fetch_inc_relaxed(v);
1140         __atomic_acquire_fence();
1141         return ret;
1142 #elif defined(arch_atomic_fetch_inc)
1143         return arch_atomic_fetch_inc(v);
1144 #else
1145         return raw_atomic_fetch_add_acquire(1, v);
1146 #endif
1147 }
1148 
1149 /**
1150  * raw_atomic_fetch_inc_release() - atomic increment with release ordering
1151  * @v: pointer to atomic_t
1152  *
1153  * Atomically updates @v to (@v + 1) with release ordering.
1154  *
1155  * Safe to use in noinstr code; prefer atomic_fetch_inc_release() elsewhere.
1156  *
1157  * Return: The original value of @v.
1158  */
1159 static __always_inline int
1160 raw_atomic_fetch_inc_release(atomic_t *v)
1161 {
1162 #if defined(arch_atomic_fetch_inc_release)
1163         return arch_atomic_fetch_inc_release(v);
1164 #elif defined(arch_atomic_fetch_inc_relaxed)
1165         __atomic_release_fence();
1166         return arch_atomic_fetch_inc_relaxed(v);
1167 #elif defined(arch_atomic_fetch_inc)
1168         return arch_atomic_fetch_inc(v);
1169 #else
1170         return raw_atomic_fetch_add_release(1, v);
1171 #endif
1172 }
1173 
1174 /**
1175  * raw_atomic_fetch_inc_relaxed() - atomic increment with relaxed ordering
1176  * @v: pointer to atomic_t
1177  *
1178  * Atomically updates @v to (@v + 1) with relaxed ordering.
1179  *
1180  * Safe to use in noinstr code; prefer atomic_fetch_inc_relaxed() elsewhere.
1181  *
1182  * Return: The original value of @v.
1183  */
1184 static __always_inline int
1185 raw_atomic_fetch_inc_relaxed(atomic_t *v)
1186 {
1187 #if defined(arch_atomic_fetch_inc_relaxed)
1188         return arch_atomic_fetch_inc_relaxed(v);
1189 #elif defined(arch_atomic_fetch_inc)
1190         return arch_atomic_fetch_inc(v);
1191 #else
1192         return raw_atomic_fetch_add_relaxed(1, v);
1193 #endif
1194 }
1195                                                  
1196 /**
1197  * raw_atomic_dec() - atomic decrement with relaxed ordering
1198  * @v: pointer to atomic_t
1199  *
1200  * Atomically updates @v to (@v - 1) with relaxed ordering.
1201  *
1202  * Safe to use in noinstr code; prefer atomic_dec() elsewhere.
1203  *
1204  * Return: Nothing.
1205  */
1206 static __always_inline void
1207 raw_atomic_dec(atomic_t *v)
1208 {
1209 #if defined(arch_atomic_dec)
1210         arch_atomic_dec(v);
1211 #else
1212         raw_atomic_sub(1, v);
1213 #endif
1214 }
1215 
1216 /**
1217  * raw_atomic_dec_return() - atomic decrement with full ordering
1218  * @v: pointer to atomic_t
1219  *
1220  * Atomically updates @v to (@v - 1) with full ordering.
1221  *
1222  * Safe to use in noinstr code; prefer atomic_dec_return() elsewhere.
1223  *
1224  * Return: The updated value of @v.
1225  */
1226 static __always_inline int
1227 raw_atomic_dec_return(atomic_t *v)
1228 {
1229 #if defined(arch_atomic_dec_return)
1230         return arch_atomic_dec_return(v);
1231 #elif defined(arch_atomic_dec_return_relaxed)
1232         int ret;
1233         __atomic_pre_full_fence();
1234         ret = arch_atomic_dec_return_relaxed(v);
1235         __atomic_post_full_fence();
1236         return ret;
1237 #else
1238         return raw_atomic_sub_return(1, v);
1239 #endif
1240 }
1241                                                  
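/*
 * Illustrative sketch, not part of the generated header: a refcount-style
 * "last put" check built on raw_atomic_dec_return(); the object type and
 * its release helper are hypothetical:
 *
 *         static void obj_put(struct obj *o)
 *         {
 *                 if (raw_atomic_dec_return(&o->refs) == 0)
 *                         obj_free(o);
 *         }
 */
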
1242 /**
1243  * raw_atomic_dec_return_acquire() - atomic decrement with acquire ordering
1244  * @v: pointer to atomic_t
1245  *
1246  * Atomically updates @v to (@v - 1) with acquire ordering.
1247  *
1248  * Safe to use in noinstr code; prefer atomic_dec_return_acquire() elsewhere.
1249  *
1250  * Return: The updated value of @v.
1251  */
1252 static __always_inline int
1253 raw_atomic_dec_return_acquire(atomic_t *v)
1254 {
1255 #if defined(arch_atomic_dec_return_acquire)
1256         return arch_atomic_dec_return_acquire(v);
1257 #elif defined(arch_atomic_dec_return_relaxed)
1258         int ret = arch_atomic_dec_return_relaxed(v);
1259         __atomic_acquire_fence();
1260         return ret;
1261 #elif defined(arch_atomic_dec_return)
1262         return arch_atomic_dec_return(v);
1263 #else
1264         return raw_atomic_sub_return_acquire(1, v);
1265 #endif
1266 }
1267 
1268 /**
1269  * raw_atomic_dec_return_release() - atomic decrement with release ordering
1270  * @v: pointer to atomic_t
1271  *
1272  * Atomically updates @v to (@v - 1) with release ordering.
1273  *
1274  * Safe to use in noinstr code; prefer atomic_dec_return_release() elsewhere.
1275  *
1276  * Return: The updated value of @v.
1277  */
1278 static __always_inline int
1279 raw_atomic_dec_return_release(atomic_t *v)
1280 {
1281 #if defined(arch_atomic_dec_return_release)
1282         return arch_atomic_dec_return_release(v);
1283 #elif defined(arch_atomic_dec_return_relaxed)
1284         __atomic_release_fence();
1285         return arch_atomic_dec_return_relaxed(v);
1286 #elif defined(arch_atomic_dec_return)
1287         return arch_atomic_dec_return(v);
1288 #else
1289         return raw_atomic_sub_return_release(1, v);
1290 #endif
1291 }
1292 
1293 /**
1294  * raw_atomic_dec_return_relaxed() - atomic decrement with relaxed ordering
1295  * @v: pointer to atomic_t
1296  *
1297  * Atomically updates @v to (@v - 1) with relaxed ordering.
1298  *
1299  * Safe to use in noinstr code; prefer atomic_dec_return_relaxed() elsewhere.
1300  *
1301  * Return: The updated value of @v.
1302  */
1303 static __always_inline int
1304 raw_atomic_dec_return_relaxed(atomic_t *v)
1305 {
1306 #if defined(arch_atomic_dec_return_relaxed)
1307         return arch_atomic_dec_return_relaxed(v);
1308 #elif defined(arch_atomic_dec_return)
1309         return arch_atomic_dec_return(v);
1310 #else
1311         return raw_atomic_sub_return_relaxed(1, v);
1312 #endif
1313 }
1314                                                  
1315 /**
1316  * raw_atomic_fetch_dec() - atomic decrement with full ordering
1317  * @v: pointer to atomic_t
1318  *
1319  * Atomically updates @v to (@v - 1) with full ordering.
1320  *
1321  * Safe to use in noinstr code; prefer atomic_fetch_dec() elsewhere.
1322  *
1323  * Return: The original value of @v.
1324  */
1325 static __always_inline int
1326 raw_atomic_fetch_dec(atomic_t *v)
1327 {
1328 #if defined(arch_atomic_fetch_dec)
1329         return arch_atomic_fetch_dec(v);
1330 #elif defined(arch_atomic_fetch_dec_relaxed)
1331         int ret;
1332         __atomic_pre_full_fence();
1333         ret = arch_atomic_fetch_dec_relaxed(v);
1334         __atomic_post_full_fence();
1335         return ret;
1336 #else
1337         return raw_atomic_fetch_sub(1, v);
1338 #endif
1339 }
1340                                                  
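/*
 * Illustrative sketch, not part of the generated header: since
 * raw_atomic_fetch_dec() returns the *original* value, reading 1 means
 * this caller performed the transition to zero; the counter name is
 * hypothetical:
 *
 *         if (raw_atomic_fetch_dec(&users) == 1)
 *                 pr_debug("last user gone\n");
 */
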
1341 /**
1342  * raw_atomic_fetch_dec_acquire() - atomic decrement with acquire ordering
1343  * @v: pointer to atomic_t
1344  *
1345  * Atomically updates @v to (@v - 1) with acquire ordering.
1346  *
1347  * Safe to use in noinstr code; prefer atomic_fetch_dec_acquire() elsewhere.
1348  *
1349  * Return: The original value of @v.
1350  */
1351 static __always_inline int
1352 raw_atomic_fetch_dec_acquire(atomic_t *v)
1353 {
1354 #if defined(arch_atomic_fetch_dec_acquire)
1355         return arch_atomic_fetch_dec_acquire(v);
1356 #elif defined(arch_atomic_fetch_dec_relaxed)
1357         int ret = arch_atomic_fetch_dec_relaxed(v);
1358         __atomic_acquire_fence();
1359         return ret;
1360 #elif defined(arch_atomic_fetch_dec)
1361         return arch_atomic_fetch_dec(v);
1362 #else
1363         return raw_atomic_fetch_sub_acquire(1, v);
1364 #endif
1365 }
1366 
1367 /**
1368  * raw_atomic_fetch_dec_release() - atomic decrement with release ordering
1369  * @v: pointer to atomic_t
1370  *
1371  * Atomically updates @v to (@v - 1) with release ordering.
1372  *
1373  * Safe to use in noinstr code; prefer atomic_fetch_dec_release() elsewhere.
1374  *
1375  * Return: The original value of @v.
1376  */
1377 static __always_inline int
1378 raw_atomic_fetch_dec_release(atomic_t *v)
1379 {
1380 #if defined(arch_atomic_fetch_dec_release)
1381         return arch_atomic_fetch_dec_release(v);
1382 #elif defined(arch_atomic_fetch_dec_relaxed)
1383         __atomic_release_fence();
1384         return arch_atomic_fetch_dec_relaxed(v);
1385 #elif defined(arch_atomic_fetch_dec)
1386         return arch_atomic_fetch_dec(v);
1387 #else
1388         return raw_atomic_fetch_sub_release(1, v);
1389 #endif
1390 }
1391 
1392 /**
1393  * raw_atomic_fetch_dec_relaxed() - atomic decrement with relaxed ordering
1394  * @v: pointer to atomic_t
1395  *
1396  * Atomically updates @v to (@v - 1) with relaxed ordering.
1397  *
1398  * Safe to use in noinstr code; prefer atomic_fetch_dec_relaxed() elsewhere.
1399  *
1400  * Return: The original value of @v.
1401  */
1402 static __always_inline int
1403 raw_atomic_fetch_dec_relaxed(atomic_t *v)
1404 {
1405 #if defined(arch_atomic_fetch_dec_relaxed)
1406         return arch_atomic_fetch_dec_relaxed(v);
1407 #elif defined(arch_atomic_fetch_dec)
1408         return arch_atomic_fetch_dec(v);
1409 #else
1410         return raw_atomic_fetch_sub_relaxed(1, v);
1411 #endif
1412 }
1413                                                  
1414 /**
1415  * raw_atomic_and() - atomic bitwise AND with relaxed ordering
1416  * @i: int value
1417  * @v: pointer to atomic_t
1418  *
1419  * Atomically updates @v to (@v & @i) with relaxed ordering.
1420  *
1421  * Safe to use in noinstr code; prefer atomic_and() elsewhere.
1422  *
1423  * Return: Nothing.
1424  */
1425 static __always_inline void
1426 raw_atomic_and(int i, atomic_t *v)
1427 {
1428         arch_atomic_and(i, v);
1429 }
1430 
1431 /**
1432  * raw_atomic_fetch_and() - atomic bitwise AND with full ordering
1433  * @i: int value
1434  * @v: pointer to atomic_t
1435  *
1436  * Atomically updates @v to (@v & @i) with full ordering.
1437  *
1438  * Safe to use in noinstr code; prefer atomic_fetch_and() elsewhere.
1439  *
1440  * Return: The original value of @v.
1441  */
1442 static __always_inline int
1443 raw_atomic_fetch_and(int i, atomic_t *v)
1444 {
1445 #if defined(arch_atomic_fetch_and)
1446         return arch_atomic_fetch_and(i, v);
1447 #elif defined(arch_atomic_fetch_and_relaxed)
1448         int ret;
1449         __atomic_pre_full_fence();
1450         ret = arch_atomic_fetch_and_relaxed(i, v);
1451         __atomic_post_full_fence();
1452         return ret;
1453 #else
1454 #error "Unable to define raw_atomic_fetch_and"
1455 #endif
1456 }
1457                                                  
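/*
 * Illustrative sketch, not part of the generated header: fetch_and keeps
 * only the bits in @i while reporting the prior full value; the mask and
 * variable names are hypothetical:
 *
 *         #define KEEP_MASK       0x0f
 *
 *         // keep only the low four bits; 'old' holds the value before masking
 *         int old = raw_atomic_fetch_and(KEEP_MASK, &flags);
 */
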
1458 /**
1459  * raw_atomic_fetch_and_acquire() - atomic bitwise AND with acquire ordering
1460  * @i: int value
1461  * @v: pointer to atomic_t
1462  *
1463  * Atomically updates @v to (@v & @i) with acquire ordering.
1464  *
1465  * Safe to use in noinstr code; prefer atomic_fetch_and_acquire() elsewhere.
1466  *
1467  * Return: The original value of @v.
1468  */
1469 static __always_inline int
1470 raw_atomic_fetch_and_acquire(int i, atomic_t *v)
1471 {
1472 #if defined(arch_atomic_fetch_and_acquire)
1473         return arch_atomic_fetch_and_acquire(i, v);
1474 #elif defined(arch_atomic_fetch_and_relaxed)
1475         int ret = arch_atomic_fetch_and_relaxed(i, v);
1476         __atomic_acquire_fence();
1477         return ret;
1478 #elif defined(arch_atomic_fetch_and)
1479         return arch_atomic_fetch_and(i, v);
1480 #else
1481 #error "Unable to define raw_atomic_fetch_and_acquire"
1482 #endif
1483 }
1484 
1485 /**
1486  * raw_atomic_fetch_and_release() - atomic bitwise AND with release ordering
1487  * @i: int value
1488  * @v: pointer to atomic_t
1489  *
1490  * Atomically updates @v to (@v & @i) with release ordering.
1491  *
1492  * Safe to use in noinstr code; prefer atomic_fetch_and_release() elsewhere.
1493  *
1494  * Return: The original value of @v.
1495  */
1496 static __always_inline int
1497 raw_atomic_fetch_and_release(int i, atomic_t *v)
1498 {
1499 #if defined(arch_atomic_fetch_and_release)
1500         return arch_atomic_fetch_and_release(i, v);
1501 #elif defined(arch_atomic_fetch_and_relaxed)
1502         __atomic_release_fence();
1503         return arch_atomic_fetch_and_relaxed(i, v);
1504 #elif defined(arch_atomic_fetch_and)
1505         return arch_atomic_fetch_and(i, v);
1506 #else
1507 #error "Unable to define raw_atomic_fetch_and_release"
1508 #endif
1509 }
1510 
1511 /**
1512  * raw_atomic_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
1513  * @i: int value
1514  * @v: pointer to atomic_t
1515  *
1516  * Atomically updates @v to (@v & @i) with relaxed ordering.
1517  *
1518  * Safe to use in noinstr code; prefer atomic_fetch_and_relaxed() elsewhere.
1519  *
1520  * Return: The original value of @v.
1521  */
1522 static __always_inline int
1523 raw_atomic_fetch_and_relaxed(int i, atomic_t *v)
1524 {
1525 #if defined(arch_atomic_fetch_and_relaxed)
1526         return arch_atomic_fetch_and_relaxed(i, v);
1527 #elif defined(arch_atomic_fetch_and)
1528         return arch_atomic_fetch_and(i, v);
1529 #else
1530 #error "Unable to define raw_atomic_fetch_and_relaxed"
1531 #endif
1532 }
1533                                                  
1534 /**
1535  * raw_atomic_andnot() - atomic bitwise AND NOT with relaxed ordering
1536  * @i: int value
1537  * @v: pointer to atomic_t
1538  *
1539  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
1540  *
1541  * Safe to use in noinstr code; prefer atomic_andnot() elsewhere.
1542  *
1543  * Return: Nothing.
1544  */
1545 static __always_inline void
1546 raw_atomic_andnot(int i, atomic_t *v)
1547 {
1548 #if defined(arch_atomic_andnot)
1549         arch_atomic_andnot(i, v);
1550 #else
1551         raw_atomic_and(~i, v);
1552 #endif
1553 }
1554 
1555 /**
1556  * raw_atomic_fetch_andnot() - atomic bitwise AND NOT with full ordering
1557  * @i: int value
1558  * @v: pointer to atomic_t
1559  *
1560  * Atomically updates @v to (@v & ~@i) with full ordering.
1561  *
1562  * Safe to use in noinstr code; prefer atomic_fetch_andnot() elsewhere.
1563  *
1564  * Return: The original value of @v.
1565  */
1566 static __always_inline int
1567 raw_atomic_fetch_andnot(int i, atomic_t *v)
1568 {
1569 #if defined(arch_atomic_fetch_andnot)
1570         return arch_atomic_fetch_andnot(i, v);
1571 #elif defined(arch_atomic_fetch_andnot_relaxed)
1572         int ret;
1573         __atomic_pre_full_fence();
1574         ret = arch_atomic_fetch_andnot_relaxed(i, v);
1575         __atomic_post_full_fence();
1576         return ret;
1577 #else
1578         return raw_atomic_fetch_and(~i, v);
1579 #endif
1580 }
1581                                                  
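/*
 * Illustrative sketch, not part of the generated header: fetch_andnot
 * clears the bits in @i and returns the prior value, so it can implement
 * a "consume this flag if it was set" step; the flag and helper below
 * are hypothetical:
 *
 *         #define WORK_PENDING    0x01
 *
 *         if (raw_atomic_fetch_andnot(WORK_PENDING, &state) & WORK_PENDING) {
 *                 // the flag was set and this caller cleared it
 *                 do_pending_work();
 *         }
 */
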
1582 /**
1583  * raw_atomic_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
1584  * @i: int value
1585  * @v: pointer to atomic_t
1586  *
1587  * Atomically updates @v to (@v & ~@i) with acquire ordering.
1588  *
1589  * Safe to use in noinstr code; prefer atomic_fetch_andnot_acquire() elsewhere.
1590  *
1591  * Return: The original value of @v.
1592  */
1593 static __always_inline int
1594 raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
1595 {
1596 #if defined(arch_atomic_fetch_andnot_acquire)
1597         return arch_atomic_fetch_andnot_acquire(i, v);
1598 #elif defined(arch_atomic_fetch_andnot_relaxed)
1599         int ret = arch_atomic_fetch_andnot_relaxed(i, v);
1600         __atomic_acquire_fence();
1601         return ret;
1602 #elif defined(arch_atomic_fetch_andnot)
1603         return arch_atomic_fetch_andnot(i, v);
1604 #else
1605         return raw_atomic_fetch_and_acquire(~i, v);
1606 #endif
1607 }
1608 
1609 /**
1610  * raw_atomic_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
1611  * @i: int value
1612  * @v: pointer to atomic_t
1613  *
1614  * Atomically updates @v to (@v & ~@i) with release ordering.
1615  *
1616  * Safe to use in noinstr code; prefer atomic_fetch_andnot_release() elsewhere.
1617  *
1618  * Return: The original value of @v.
1619  */
1620 static __always_inline int
1621 raw_atomic_fetch_andnot_release(int i, atomic_t *v)
1622 {
1623 #if defined(arch_atomic_fetch_andnot_release)
1624         return arch_atomic_fetch_andnot_release(i, v);
1625 #elif defined(arch_atomic_fetch_andnot_relaxed)
1626         __atomic_release_fence();
1627         return arch_atomic_fetch_andnot_relaxed(i, v);
1628 #elif defined(arch_atomic_fetch_andnot)
1629         return arch_atomic_fetch_andnot(i, v);
1630 #else
1631         return raw_atomic_fetch_and_release(~i, v);
1632 #endif
1633 }
1634 
1635 /**
1636  * raw_atomic_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
1637  * @i: int value
1638  * @v: pointer to atomic_t
1639  *
1640  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
1641  *
1642  * Safe to use in noinstr code; prefer atomic_fetch_andnot_relaxed() elsewhere.
1643  *
1644  * Return: The original value of @v.
1645  */
1646 static __always_inline int
1647 raw_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
1648 {
1649 #if defined(arch_atomic_fetch_andnot_relaxed)
1650         return arch_atomic_fetch_andnot_relaxed(i, v);
1651 #elif defined(arch_atomic_fetch_andnot)
1652         return arch_atomic_fetch_andnot(i, v);
1653 #else
1654         return raw_atomic_fetch_and_relaxed(~i, v);
1655 #endif
1656 }
1657                                                  
1658 /**
1659  * raw_atomic_or() - atomic bitwise OR with relaxed ordering
1660  * @i: int value
1661  * @v: pointer to atomic_t
1662  *
1663  * Atomically updates @v to (@v | @i) with relaxed ordering.
1664  *
1665  * Safe to use in noinstr code; prefer atomic_or() elsewhere.
1666  *
1667  * Return: Nothing.
1668  */
1669 static __always_inline void
1670 raw_atomic_or(int i, atomic_t *v)
1671 {
1672         arch_atomic_or(i, v);
1673 }
1674 
1675 /**
1676  * raw_atomic_fetch_or() - atomic bitwise OR with full ordering
1677  * @i: int value
1678  * @v: pointer to atomic_t
1679  *
1680  * Atomically updates @v to (@v | @i) with full ordering.
1681  *
1682  * Safe to use in noinstr code; prefer atomic_fetch_or() elsewhere.
1683  *
1684  * Return: The original value of @v.
1685  */
1686 static __always_inline int
1687 raw_atomic_fetch_or(int i, atomic_t *v)
1688 {
1689 #if defined(arch_atomic_fetch_or)
1690         return arch_atomic_fetch_or(i, v);
1691 #elif defined(arch_atomic_fetch_or_relaxed)
1692         int ret;
1693         __atomic_pre_full_fence();
1694         ret = arch_atomic_fetch_or_relaxed(i, v);
1695         __atomic_post_full_fence();
1696         return ret;
1697 #else
1698 #error "Unable to define raw_atomic_fetch_or"
1699 #endif
1700 }
1701                                                  
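/*
 * Illustrative sketch, not part of the generated header: fetch_or acts
 * as a bitwise test-and-set, since the returned value tells whether the
 * bit was already set; the flag and helper below are hypothetical:
 *
 *         #define SHUTDOWN        0x01
 *
 *         if (!(raw_atomic_fetch_or(SHUTDOWN, &state) & SHUTDOWN)) {
 *                 // this caller is the one that set the flag first
 *                 start_shutdown();
 *         }
 */
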
1702 /**
1703  * raw_atomic_fetch_or_acquire() - atomic bitwise OR with acquire ordering
1704  * @i: int value
1705  * @v: pointer to atomic_t
1706  *
1707  * Atomically updates @v to (@v | @i) with acquire ordering.
1708  *
1709  * Safe to use in noinstr code; prefer atomic_fetch_or_acquire() elsewhere.
1710  *
1711  * Return: The original value of @v.
1712  */
1713 static __always_inline int
1714 raw_atomic_fetch_or_acquire(int i, atomic_t *v)
1715 {
1716 #if defined(arch_atomic_fetch_or_acquire)
1717         return arch_atomic_fetch_or_acquire(i, v);
1718 #elif defined(arch_atomic_fetch_or_relaxed)
1719         int ret = arch_atomic_fetch_or_relaxed(i, v);
1720         __atomic_acquire_fence();
1721         return ret;
1722 #elif defined(arch_atomic_fetch_or)
1723         return arch_atomic_fetch_or(i, v);
1724 #else
1725 #error "Unable to define raw_atomic_fetch_or_acquire"
1726 #endif
1727 }
1728 
1729 /**
1730  * raw_atomic_fetch_or_release() - atomic bitwise OR with release ordering
1731  * @i: int value
1732  * @v: pointer to atomic_t
1733  *
1734  * Atomically updates @v to (@v | @i) with release ordering.
1735  *
1736  * Safe to use in noinstr code; prefer atomic_fetch_or_release() elsewhere.
1737  *
1738  * Return: The original value of @v.
1739  */
1740 static __always_inline int
1741 raw_atomic_fetch_or_release(int i, atomic_t *v)
1742 {
1743 #if defined(arch_atomic_fetch_or_release)
1744         return arch_atomic_fetch_or_release(i, v);
1745 #elif defined(arch_atomic_fetch_or_relaxed)
1746         __atomic_release_fence();
1747         return arch_atomic_fetch_or_relaxed(i, v);
1748 #elif defined(arch_atomic_fetch_or)
1749         return arch_atomic_fetch_or(i, v);
1750 #else
1751 #error "Unable to define raw_atomic_fetch_or_release"
1752 #endif
1753 }
1754 
1755 /**
1756  * raw_atomic_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
1757  * @i: int value
1758  * @v: pointer to atomic_t
1759  *
1760  * Atomically updates @v to (@v | @i) with relaxed ordering.
1761  *
1762  * Safe to use in noinstr code; prefer atomic_fetch_or_relaxed() elsewhere.
1763  *
1764  * Return: The original value of @v.
1765  */
1766 static __always_inline int
1767 raw_atomic_fetch_or_relaxed(int i, atomic_t *v)
1768 {
1769 #if defined(arch_atomic_fetch_or_relaxed)
1770         return arch_atomic_fetch_or_relaxed(i, v);
1771 #elif defined(arch_atomic_fetch_or)
1772         return arch_atomic_fetch_or(i, v);
1773 #else
1774 #error "Unable to define raw_atomic_fetch_or_relaxed"
1775 #endif
1776 }
1777                                                  
1778 /**
1779  * raw_atomic_xor() - atomic bitwise XOR with relaxed ordering
1780  * @i: int value
1781  * @v: pointer to atomic_t
1782  *
1783  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1784  *
1785  * Safe to use in noinstr code; prefer atomic_xor() elsewhere.
1786  *
1787  * Return: Nothing.
1788  */
1789 static __always_inline void
1790 raw_atomic_xor(int i, atomic_t *v)
1791 {
1792         arch_atomic_xor(i, v);
1793 }
1794 
1795 /**
1796  * raw_atomic_fetch_xor() - atomic bitwise XOR with full ordering
1797  * @i: int value
1798  * @v: pointer to atomic_t
1799  *
1800  * Atomically updates @v to (@v ^ @i) with full ordering.
1801  *
1802  * Safe to use in noinstr code; prefer atomic_fetch_xor() elsewhere.
1803  *
1804  * Return: The original value of @v.
1805  */
1806 static __always_inline int
1807 raw_atomic_fetch_xor(int i, atomic_t *v)
1808 {
1809 #if defined(arch_atomic_fetch_xor)
1810         return arch_atomic_fetch_xor(i, v);
1811 #elif defined(arch_atomic_fetch_xor_relaxed)
1812         int ret;
1813         __atomic_pre_full_fence();
1814         ret = arch_atomic_fetch_xor_relaxed(i, v);
1815         __atomic_post_full_fence();
1816         return ret;
1817 #else
1818 #error "Unable to define raw_atomic_fetch_xor"
1819 #endif
1820 }
1821                                                  
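/*
 * Illustrative sketch, not part of the generated header: fetch_xor
 * toggles the given bits and reports their previous state, e.g. flipping
 * a hypothetical phase bit:
 *
 *         #define PHASE_BIT       0x01
 *
 *         int old = raw_atomic_fetch_xor(PHASE_BIT, &phase);
 *         bool was_odd = old & PHASE_BIT;    // state before the toggle
 */
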
1822 /**
1823  * raw_atomic_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
1824  * @i: int value
1825  * @v: pointer to atomic_t
1826  *
1827  * Atomically updates @v to (@v ^ @i) with acquire ordering.
1828  *
1829  * Safe to use in noinstr code; prefer atomic_fetch_xor_acquire() elsewhere.
1830  *
1831  * Return: The original value of @v.
1832  */
1833 static __always_inline int
1834 raw_atomic_fetch_xor_acquire(int i, atomic_t *v)
1835 {
1836 #if defined(arch_atomic_fetch_xor_acquire)
1837         return arch_atomic_fetch_xor_acquire(i, v);
1838 #elif defined(arch_atomic_fetch_xor_relaxed)
1839         int ret = arch_atomic_fetch_xor_relaxed(i, v);
1840         __atomic_acquire_fence();
1841         return ret;
1842 #elif defined(arch_atomic_fetch_xor)
1843         return arch_atomic_fetch_xor(i, v);
1844 #else
1845 #error "Unable to define raw_atomic_fetch_xor_acquire"
1846 #endif
1847 }
1848 
1849 /**
1850  * raw_atomic_fetch_xor_release() - atomic bitwise XOR with release ordering
1851  * @i: int value
1852  * @v: pointer to atomic_t
1853  *
1854  * Atomically updates @v to (@v ^ @i) with release ordering.
1855  *
1856  * Safe to use in noinstr code; prefer atomic_fetch_xor_release() elsewhere.
1857  *
1858  * Return: The original value of @v.
1859  */
1860 static __always_inline int
1861 raw_atomic_fetch_xor_release(int i, atomic_t *v)
1862 {
1863 #if defined(arch_atomic_fetch_xor_release)
1864         return arch_atomic_fetch_xor_release(i, v);
1865 #elif defined(arch_atomic_fetch_xor_relaxed)
1866         __atomic_release_fence();
1867         return arch_atomic_fetch_xor_relaxed(i, v);
1868 #elif defined(arch_atomic_fetch_xor)
1869         return arch_atomic_fetch_xor(i, v);
1870 #else
1871 #error "Unable to define raw_atomic_fetch_xor_release"
1872 #endif
1873 }
1874 
1875 /**
1876  * raw_atomic_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
1877  * @i: int value
1878  * @v: pointer to atomic_t
1879  *
1880  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1881  *
1882  * Safe to use in noinstr code; prefer atomic_fetch_xor_relaxed() elsewhere.
1883  *
1884  * Return: The original value of @v.
1885  */
1886 static __always_inline int
1887 raw_atomic_fetch_xor_relaxed(int i, atomic_t *v)
1888 {
1889 #if defined(arch_atomic_fetch_xor_relaxed)
1890         return arch_atomic_fetch_xor_relaxed(i, v);
1891 #elif defined(arch_atomic_fetch_xor)
1892         return arch_atomic_fetch_xor(i, v);
1893 #else
1894 #error "Unable to define raw_atomic_fetch_xor_relaxed"
1895 #endif
1896 }
1897                                                  
1898 /**
1899  * raw_atomic_xchg() - atomic exchange with full ordering
1900  * @v: pointer to atomic_t
1901  * @new: int value to assign
1902  *
1903  * Atomically updates @v to @new with full ordering.
1904  *
1905  * Safe to use in noinstr code; prefer atomic_xchg() elsewhere.
1906  *
1907  * Return: The original value of @v.
1908  */
1909 static __always_inline int
1910 raw_atomic_xchg(atomic_t *v, int new)
1911 {
1912 #if defined(arch_atomic_xchg)
1913         return arch_atomic_xchg(v, new);
1914 #elif defined(arch_atomic_xchg_relaxed)
1915         int ret;
1916         __atomic_pre_full_fence();
1917         ret = arch_atomic_xchg_relaxed(v, new);
1918         __atomic_post_full_fence();
1919         return ret;
1920 #else
1921         return raw_xchg(&v->counter, new);
1922 #endif
1923 }
1924                                                  
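/*
 * Illustrative sketch, not part of the generated header: xchg can hand a
 * pending value to exactly one consumer by swapping in a sentinel; the
 * variable names are hypothetical:
 *
 *         static atomic_t pending_events = ATOMIC_INIT(0);
 *
 *         static int drain_events(void)
 *         {
 *                 // take the whole count and reset it in one atomic step
 *                 return raw_atomic_xchg(&pending_events, 0);
 *         }
 */
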
1925 /**
1926  * raw_atomic_xchg_acquire() - atomic exchange with acquire ordering
1927  * @v: pointer to atomic_t
1928  * @new: int value to assign
1929  *
1930  * Atomically updates @v to @new with acquire ordering.
1931  *
1932  * Safe to use in noinstr code; prefer atomic_xchg_acquire() elsewhere.
1933  *
1934  * Return: The original value of @v.
1935  */
1936 static __always_inline int
1937 raw_atomic_xchg_acquire(atomic_t *v, int new)
1938 {
1939 #if defined(arch_atomic_xchg_acquire)
1940         return arch_atomic_xchg_acquire(v, new);
1941 #elif defined(arch_atomic_xchg_relaxed)
1942         int ret = arch_atomic_xchg_relaxed(v, new);
1943         __atomic_acquire_fence();
1944         return ret;
1945 #elif defined(arch_atomic_xchg)
1946         return arch_atomic_xchg(v, new);
1947 #else
1948         return raw_xchg_acquire(&v->counter, new);
1949 #endif
1950 }
1951 
1952 /**
1953  * raw_atomic_xchg_release() - atomic exchange with release ordering
1954  * @v: pointer to atomic_t
1955  * @new: int value to assign
1956  *
1957  * Atomically updates @v to @new with release ordering.
1958  *
1959  * Safe to use in noinstr code; prefer atomic_xchg_release() elsewhere.
1960  *
1961  * Return: The original value of @v.
1962  */
1963 static __always_inline int
1964 raw_atomic_xchg_release(atomic_t *v, int new)
1965 {
1966 #if defined(arch_atomic_xchg_release)
1967         return arch_atomic_xchg_release(v, new);
1968 #elif defined(arch_atomic_xchg_relaxed)
1969         __atomic_release_fence();
1970         return arch_atomic_xchg_relaxed(v, new);
1971 #elif defined(arch_atomic_xchg)
1972         return arch_atomic_xchg(v, new);
1973 #else
1974         return raw_xchg_release(&v->counter, new);
1975 #endif
1976 }
1977 
1978 /**
1979  * raw_atomic_xchg_relaxed() - atomic exchange with relaxed ordering
1980  * @v: pointer to atomic_t
1981  * @new: int value to assign
1982  *
1983  * Atomically updates @v to @new with relaxed ordering.
1984  *
1985  * Safe to use in noinstr code; prefer atomic_xchg_relaxed() elsewhere.
1986  *
1987  * Return: The original value of @v.
1988  */
1989 static __always_inline int
1990 raw_atomic_xchg_relaxed(atomic_t *v, int new)
1991 {
1992 #if defined(arch_atomic_xchg_relaxed)
1993         return arch_atomic_xchg_relaxed(v, new);
1994 #elif defined(arch_atomic_xchg)
1995         return arch_atomic_xchg(v, new);
1996 #else
1997         return raw_xchg_relaxed(&v->counter, new);
1998 #endif
1999 }
2000                                                  
2001 /**
2002  * raw_atomic_cmpxchg() - atomic compare and exchange with full ordering
2003  * @v: pointer to atomic_t
2004  * @old: int value to compare with
2005  * @new: int value to assign
2006  *
2007  * If (@v == @old), atomically updates @v to @new with full ordering.
2008  * Otherwise, @v is not modified and relaxed ordering is provided.
2009  *
2010  * Safe to use in noinstr code; prefer atomic_cmpxchg() elsewhere.
2011  *
2012  * Return: The original value of @v.
2013  */
2014 static __always_inline int
2015 raw_atomic_cmpxchg(atomic_t *v, int old, int new)
2016 {
2017 #if defined(arch_atomic_cmpxchg)
2018         return arch_atomic_cmpxchg(v, old, new);
2019 #elif defined(arch_atomic_cmpxchg_relaxed)
2020         int ret;
2021         __atomic_pre_full_fence();
2022         ret = arch_atomic_cmpxchg_relaxed(v, old, new);
2023         __atomic_post_full_fence();
2024         return ret;
2025 #else
2026         return raw_cmpxchg(&v->counter, old, new);
2027 #endif
2028 }
2029                                                  
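/*
 * Illustrative sketch, not part of the generated header: the classic
 * compare-and-swap retry loop, here implementing an "increment unless
 * zero" helper (the helper name is hypothetical); raw_atomic_read() is
 * provided earlier in this header:
 *
 *         static bool sketch_inc_not_zero(atomic_t *v)
 *         {
 *                 int old = raw_atomic_read(v);
 *
 *                 while (old != 0) {
 *                         int seen = raw_atomic_cmpxchg(v, old, old + 1);
 *
 *                         if (seen == old)
 *                                 return true;    // swap succeeded
 *                         old = seen;             // lost a race, retry with fresh value
 *                 }
 *                 return false;
 *         }
 */
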
2030 /**
2031  * raw_atomic_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
2032  * @v: pointer to atomic_t
2033  * @old: int value to compare with
2034  * @new: int value to assign
2035  *
2036  * If (@v == @old), atomically updates @v to @new with acquire ordering.
2037  * Otherwise, @v is not modified and relaxed ordering is provided.
2038  *
2039  * Safe to use in noinstr code; prefer atomic_cmpxchg_acquire() elsewhere.
2040  *
2041  * Return: The original value of @v.
2042  */
2043 static __always_inline int
2044 raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
2045 {
2046 #if defined(arch_atomic_cmpxchg_acquire)
2047         return arch_atomic_cmpxchg_acquire(v, old, new);
2048 #elif defined(arch_atomic_cmpxchg_relaxed)
2049         int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
2050         __atomic_acquire_fence();
2051         return ret;
2052 #elif defined(arch_atomic_cmpxchg)
2053         return arch_atomic_cmpxchg(v, old, new);
2054 #else
2055         return raw_cmpxchg_acquire(&v->counter, old, new);
2056 #endif
2057 }
2058                                                  
2059 /**
2060  * raw_atomic_cmpxchg_release() - atomic compare and exchange with release ordering
2061  * @v: pointer to atomic_t
2062  * @old: int value to compare with
2063  * @new: int value to assign
2064  *
2065  * If (@v == @old), atomically updates @v to @new with release ordering.
2066  * Otherwise, @v is not modified and relaxed ordering is provided.
2067  *
2068  * Safe to use in noinstr code; prefer atomic_cmpxchg_release() elsewhere.
2069  *
2070  * Return: The original value of @v.
2071  */
2072 static __always_inline int
2073 raw_atomic_cmpxchg_release(atomic_t *v, int old, int new)
2074 {
2075 #if defined(arch_atomic_cmpxchg_release)
2076         return arch_atomic_cmpxchg_release(v, old, new);
2077 #elif defined(arch_atomic_cmpxchg_relaxed)
2078         __atomic_release_fence();
2079         return arch_atomic_cmpxchg_relaxed(v, old, new);
2080 #elif defined(arch_atomic_cmpxchg)
2081         return arch_atomic_cmpxchg(v, old, new);
2082 #else
2083         return raw_cmpxchg_release(&v->counter, old, new);
2084 #endif
2085 }
2086                                                  
2087 /**
2088  * raw_atomic_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
2089  * @v: pointer to atomic_t
2090  * @old: int value to compare with
2091  * @new: int value to assign
2092  *
2093  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
2094  * Otherwise, @v is not modified and relaxed ordering is provided.
2095  *
2096  * Safe to use in noinstr code; prefer atomic_cmpxchg_relaxed() elsewhere.
2097  *
2098  * Return: The original value of @v.
2099  */
2100 static __always_inline int
2101 raw_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
2102 {
2103 #if defined(arch_atomic_cmpxchg_relaxed)
2104         return arch_atomic_cmpxchg_relaxed(v, old, new);
2105 #elif defined(arch_atomic_cmpxchg)
2106         return arch_atomic_cmpxchg(v, old, new);
2107 #else
2108         return raw_cmpxchg_relaxed(&v->counter, old, new);
2109 #endif
2110 }
2111                                                  
2112 /**
2113  * raw_atomic_try_cmpxchg() - atomic compare and exchange with full ordering
2114  * @v: pointer to atomic_t
2115  * @old: pointer to int value to compare with
2116  * @new: int value to assign
2117  *
2118  * If (@v == @old), atomically updates @v to @new with full ordering.
2119  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2120  * and relaxed ordering is provided.
2121  *
2122  * Safe to use in noinstr code; prefer atomic_try_cmpxchg() elsewhere.
2123  *
2124  * Return: @true if the exchange occurred, @false otherwise.
2125  */
2126 static __always_inline bool
2127 raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
2128 {
2129 #if defined(arch_atomic_try_cmpxchg)
2130         return arch_atomic_try_cmpxchg(v, old, new);
2131 #elif defined(arch_atomic_try_cmpxchg_relaxed)
2132         bool ret;
2133         __atomic_pre_full_fence();
2134         ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
2135         __atomic_post_full_fence();
2136         return ret;
2137 #else
2138         int r, o = *old;
2139         r = raw_atomic_cmpxchg(v, o, new);
2140         if (unlikely(r != o))
2141                 *old = r;
2142         return likely(r == o);
2143 #endif
2144 }
2145                                                  
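/*
 * Usage sketch (editorial, hedged): the same retry loop written with
 * raw_atomic_try_cmpxchg(), which refreshes @old on failure and so removes
 * the explicit re-read. The function name and bound are hypothetical.
 */
static __always_inline bool example_inc_below(atomic_t *v, int limit)
{
        int old = raw_atomic_read(v);

        do {
                if (old >= limit)
                        return false;
        } while (!raw_atomic_try_cmpxchg(v, &old, old + 1));

        return true;
}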
2146 /**
2147  * raw_atomic_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
2148  * @v: pointer to atomic_t
2149  * @old: pointer to int value to compare with
2150  * @new: int value to assign
2151  *
2152  * If (@v == @old), atomically updates @v to @new with acquire ordering.
2153  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2154  * and relaxed ordering is provided.
2155  *
2156  * Safe to use in noinstr code; prefer atomic_try_cmpxchg_acquire() elsewhere.
2157  *
2158  * Return: @true if the exchange occurred, @false otherwise.
2159  */
2160 static __always_inline bool
2161 raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
2162 {
2163 #if defined(arch_atomic_try_cmpxchg_acquire)
2164         return arch_atomic_try_cmpxchg_acquire(v, old, new);
2165 #elif defined(arch_atomic_try_cmpxchg_relaxed)
2166         bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
2167         __atomic_acquire_fence();
2168         return ret;
2169 #elif defined(arch_atomic_try_cmpxchg)
2170         return arch_atomic_try_cmpxchg(v, old, new);
2171 #else
2172         int r, o = *old;
2173         r = raw_atomic_cmpxchg_acquire(v, o, new);
2174         if (unlikely(r != o))
2175                 *old = r;
2176         return likely(r == o);
2177 #endif
2178 }
2179                                                  
2180 /**
2181  * raw_atomic_try_cmpxchg_release() - atomic compare and exchange with release ordering
2182  * @v: pointer to atomic_t
2183  * @old: pointer to int value to compare with
2184  * @new: int value to assign
2185  *
2186  * If (@v == @old), atomically updates @v to @new with release ordering.
2187  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2188  * and relaxed ordering is provided.
2189  *
2190  * Safe to use in noinstr code; prefer atomic_try_cmpxchg_release() elsewhere.
2191  *
2192  * Return: @true if the exchange occurred, @false otherwise.
2193  */
2194 static __always_inline bool
2195 raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
2196 {
2197 #if defined(arch_atomic_try_cmpxchg_release)
2198         return arch_atomic_try_cmpxchg_release(v, old, new);
2199 #elif defined(arch_atomic_try_cmpxchg_relaxed)
2200         __atomic_release_fence();
2201         return arch_atomic_try_cmpxchg_relaxed(v, old, new);
2202 #elif defined(arch_atomic_try_cmpxchg)
2203         return arch_atomic_try_cmpxchg(v, old, new);
2204 #else
2205         int r, o = *old;
2206         r = raw_atomic_cmpxchg_release(v, o, new);
2207         if (unlikely(r != o))
2208                 *old = r;
2209         return likely(r == o);
2210 #endif
2211 }
2212                                                  
2213 /**
2214  * raw_atomic_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
2215  * @v: pointer to atomic_t
2216  * @old: pointer to int value to compare with
2217  * @new: int value to assign
2218  *
2219  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
2220  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2221  * and relaxed ordering is provided.
2222  *
2223  * Safe to use in noinstr code; prefer atomic_try_cmpxchg_relaxed() elsewhere.
2224  *
2225  * Return: @true if the exchange occurred, @false otherwise.
2226  */
2227 static __always_inline bool
2228 raw_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
2229 {
2230 #if defined(arch_atomic_try_cmpxchg_relaxed)
2231         return arch_atomic_try_cmpxchg_relaxed(v, old, new);
2232 #elif defined(arch_atomic_try_cmpxchg)
2233         return arch_atomic_try_cmpxchg(v, old, new);
2234 #else
2235         int r, o = *old;
2236         r = raw_atomic_cmpxchg_relaxed(v, o, new);
2237         if (unlikely(r != o))
2238                 *old = r;
2239         return likely(r == o);
2240 #endif
2241 }
2242                                                  
2243 /**
2244  * raw_atomic_sub_and_test() - atomic subtract and test if zero with full ordering
2245  * @i: int value to subtract
2246  * @v: pointer to atomic_t
2247  *
2248  * Atomically updates @v to (@v - @i) with full ordering.
2249  *
2250  * Safe to use in noinstr code; prefer atomic_sub_and_test() elsewhere.
2251  *
2252  * Return: @true if the resulting value of @v is zero, @false otherwise.
2253  */
2254 static __always_inline bool
2255 raw_atomic_sub_and_test(int i, atomic_t *v)
2256 {
2257 #if defined(arch_atomic_sub_and_test)
2258         return arch_atomic_sub_and_test(i, v);
2259 #else
2260         return raw_atomic_sub_return(i, v) == 0;
2261 #endif
2262 }
2263                                                  
2264 /**
2265  * raw_atomic_dec_and_test() - atomic decrement and test if zero with full ordering
2266  * @v: pointer to atomic_t
2267  *
2268  * Atomically updates @v to (@v - 1) with full ordering.
2269  *
2270  * Safe to use in noinstr code; prefer atomic_dec_and_test() elsewhere.
2271  *
2272  * Return: @true if the resulting value of @v is zero, @false otherwise.
2273  */
2274 static __always_inline bool
2275 raw_atomic_dec_and_test(atomic_t *v)
2276 {
2277 #if defined(arch_atomic_dec_and_test)
2278         return arch_atomic_dec_and_test(v);
2279 #else
2280         return raw_atomic_dec_return(v) == 0;
2281 #endif
2282 }
2283                                                  
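/*
 * Usage sketch (editorial, hedged): the usual reference-count "put" built on
 * raw_atomic_dec_and_test(); full ordering makes the final decrement a safe
 * point to release the object. struct example_obj, example_put() and the
 * destructor callback are hypothetical names, not part of this header.
 */
struct example_obj {
        atomic_t refs;
};

static __always_inline void example_put(struct example_obj *obj,
                                        void (*release)(struct example_obj *))
{
        if (raw_atomic_dec_and_test(&obj->refs))
                release(obj);
}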
2284 /**
2285  * raw_atomic_inc_and_test() - atomic increment and test if zero with full ordering
2286  * @v: pointer to atomic_t
2287  *
2288  * Atomically updates @v to (@v + 1) with full ordering.
2289  *
2290  * Safe to use in noinstr code; prefer atomic_inc_and_test() elsewhere.
2291  *
2292  * Return: @true if the resulting value of @v is zero, @false otherwise.
2293  */
2294 static __always_inline bool
2295 raw_atomic_inc_and_test(atomic_t *v)
2296 {
2297 #if defined(arch_atomic_inc_and_test)
2298         return arch_atomic_inc_and_test(v);
2299 #else
2300         return raw_atomic_inc_return(v) == 0;
2301 #endif
2302 }
2303                                                  
2304 /**
2305  * raw_atomic_add_negative() - atomic add and test if negative with full ordering
2306  * @i: int value to add
2307  * @v: pointer to atomic_t
2308  *
2309  * Atomically updates @v to (@v + @i) with full ordering.
2310  *
2311  * Safe to use in noinstr code; prefer atomic_add_negative() elsewhere.
2312  *
2313  * Return: @true if the resulting value of @v is negative, @false otherwise.
2314  */
2315 static __always_inline bool
2316 raw_atomic_add_negative(int i, atomic_t *v)
2317 {
2318 #if defined(arch_atomic_add_negative)
2319         return arch_atomic_add_negative(i, v);
2320 #elif defined(arch_atomic_add_negative_relaxed)
2321         bool ret;
2322         __atomic_pre_full_fence();
2323         ret = arch_atomic_add_negative_relaxed(i, v);
2324         __atomic_post_full_fence();
2325         return ret;
2326 #else
2327         return raw_atomic_add_return(i, v) < 0;
2328 #endif
2329 }
2330                                                  
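/*
 * Usage sketch (editorial, hedged): one way raw_atomic_add_negative() can be
 * used, charging against a signed budget and detecting that this charge drove
 * it below zero. The function name and the budget semantics are hypothetical.
 */
static __always_inline bool example_charge_overran(atomic_t *budget, int cost)
{
        /* true when (budget - cost) went negative, i.e. the charge overran */
        return raw_atomic_add_negative(-cost, budget);
}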
2331 /**
2332  * raw_atomic_add_negative_acquire() - atomic add and test if negative with acquire ordering
2333  * @i: int value to add
2334  * @v: pointer to atomic_t
2335  *
2336  * Atomically updates @v to (@v + @i) with acquire ordering.
2337  *
2338  * Safe to use in noinstr code; prefer atomic_add_negative_acquire() elsewhere.
2339  *
2340  * Return: @true if the resulting value of @v is negative, @false otherwise.
2341  */
2342 static __always_inline bool
2343 raw_atomic_add_negative_acquire(int i, atomic_t *v)
2344 {
2345 #if defined(arch_atomic_add_negative_acquire)
2346         return arch_atomic_add_negative_acquire(i, v);
2347 #elif defined(arch_atomic_add_negative_relaxed)
2348         bool ret = arch_atomic_add_negative_relaxed(i, v);
2349         __atomic_acquire_fence();
2350         return ret;
2351 #elif defined(arch_atomic_add_negative)
2352         return arch_atomic_add_negative(i, v);
2353 #else
2354         return raw_atomic_add_return_acquire(i, v) < 0;
2355 #endif
2356 }
2357                                                  
2358 /**
2359  * raw_atomic_add_negative_release() - atomic add and test if negative with release ordering
2360  * @i: int value to add
2361  * @v: pointer to atomic_t
2362  *
2363  * Atomically updates @v to (@v + @i) with release ordering.
2364  *
2365  * Safe to use in noinstr code; prefer atomic_add_negative_release() elsewhere.
2366  *
2367  * Return: @true if the resulting value of @v is negative, @false otherwise.
2368  */
2369 static __always_inline bool
2370 raw_atomic_add_negative_release(int i, atomic_t *v)
2371 {
2372 #if defined(arch_atomic_add_negative_release)
2373         return arch_atomic_add_negative_release(i, v);
2374 #elif defined(arch_atomic_add_negative_relaxed)
2375         __atomic_release_fence();
2376         return arch_atomic_add_negative_relaxed(i, v);
2377 #elif defined(arch_atomic_add_negative)
2378         return arch_atomic_add_negative(i, v);
2379 #else
2380         return raw_atomic_add_return_release(i, v) < 0;
2381 #endif
2382 }
2383                                                  
2384 /**
2385  * raw_atomic_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
2386  * @i: int value to add
2387  * @v: pointer to atomic_t
2388  *
2389  * Atomically updates @v to (@v + @i) with relaxed ordering.
2390  *
2391  * Safe to use in noinstr code; prefer atomic_add_negative_relaxed() elsewhere.
2392  *
2393  * Return: @true if the resulting value of @v is negative, @false otherwise.
2394  */
2395 static __always_inline bool
2396 raw_atomic_add_negative_relaxed(int i, atomic_t *v)
2397 {
2398 #if defined(arch_atomic_add_negative_relaxed)
2399         return arch_atomic_add_negative_relaxed(i, v);
2400 #elif defined(arch_atomic_add_negative)
2401         return arch_atomic_add_negative(i, v);
2402 #else
2403         return raw_atomic_add_return_relaxed(i, v) < 0;
2404 #endif
2405 }
2406                                                  
2407 /**
2408  * raw_atomic_fetch_add_unless() - atomic add unless value with full ordering
2409  * @v: pointer to atomic_t
2410  * @a: int value to add
2411  * @u: int value to compare with
2412  *
2413  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
2414  * Otherwise, @v is not modified and relaxed ordering is provided.
2415  *
2416  * Safe to use in noinstr code; prefer atomic_fetch_add_unless() elsewhere.
2417  *
2418  * Return: The original value of @v.
2419  */
2420 static __always_inline int
2421 raw_atomic_fetch_add_unless(atomic_t *v, int a, int u)
2422 {
2423 #if defined(arch_atomic_fetch_add_unless)
2424         return arch_atomic_fetch_add_unless(v, a, u);
2425 #else
2426         int c = raw_atomic_read(v);
2427
2428         do {
2429                 if (unlikely(c == u))
2430                         break;
2431         } while (!raw_atomic_try_cmpxchg(v, &c, c + a));
2432
2433         return c;
2434 #endif
2435 }
2436                                                  
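/*
 * Usage sketch (editorial, hedged): raw_atomic_fetch_add_unless() as a
 * "count one more user unless the counter holds a sentinel" helper; the
 * returned original value tells the caller whether the add was applied.
 * The function name and the -1 sentinel are hypothetical.
 */
static __always_inline bool example_get_unless_disabled(atomic_t *users)
{
        /* -1 means "disabled"; otherwise record one more user */
        return raw_atomic_fetch_add_unless(users, 1, -1) != -1;
}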
2437 /**
2438  * raw_atomic_add_unless() - atomic add unless value with full ordering
2439  * @v: pointer to atomic_t
2440  * @a: int value to add
2441  * @u: int value to compare with
2442  *
2443  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
2444  * Otherwise, @v is not modified and relaxed ordering is provided.
2445  *
2446  * Safe to use in noinstr code; prefer atomic_add_unless() elsewhere.
2447  *
2448  * Return: @true if @v was updated, @false otherwise.
2449  */
2450 static __always_inline bool
2451 raw_atomic_add_unless(atomic_t *v, int a, int u)
2452 {
2453 #if defined(arch_atomic_add_unless)
2454         return arch_atomic_add_unless(v, a, u);
2455 #else
2456         return raw_atomic_fetch_add_unless(v, a, u) != u;
2457 #endif
2458 }
2459                                                  
2460 /**
2461  * raw_atomic_inc_not_zero() - atomic increment unless zero with full ordering
2462  * @v: pointer to atomic_t
2463  *
2464  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
2465  * Otherwise, @v is not modified and relaxed ordering is provided.
2466  *
2467  * Safe to use in noinstr code; prefer atomic_inc_not_zero() elsewhere.
2468  *
2469  * Return: @true if @v was updated, @false otherwise.
2470  */
2471 static __always_inline bool
2472 raw_atomic_inc_not_zero(atomic_t *v)
2473 {
2474 #if defined(arch_atomic_inc_not_zero)
2475         return arch_atomic_inc_not_zero(v);
2476 #else
2477         return raw_atomic_add_unless(v, 1, 0);
2478 #endif
2479 }
2480                                                  
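/*
 * Usage sketch (editorial, hedged): the lookup-side "get" that only takes a
 * reference while the count is still non-zero, pairing with the
 * dec_and_test() put sketched earlier. struct example_obj and example_get()
 * are the same hypothetical names used there.
 */
static __always_inline bool example_get(struct example_obj *obj)
{
        return raw_atomic_inc_not_zero(&obj->refs);
}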
2481 /**
2482  * raw_atomic_inc_unless_negative() - atomic increment unless negative with full ordering
2483  * @v: pointer to atomic_t
2484  *
2485  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
2486  * Otherwise, @v is not modified and relaxed ordering is provided.
2487  *
2488  * Safe to use in noinstr code; prefer atomic_inc_unless_negative() elsewhere.
2489  *
2490  * Return: @true if @v was updated, @false otherwise.
2491  */
2492 static __always_inline bool
2493 raw_atomic_inc_unless_negative(atomic_t *v)
2494 {
2495 #if defined(arch_atomic_inc_unless_negative)
2496         return arch_atomic_inc_unless_negative(v);
2497 #else
2498         int c = raw_atomic_read(v);
2499
2500         do {
2501                 if (unlikely(c < 0))
2502                         return false;
2503         } while (!raw_atomic_try_cmpxchg(v, &c, c + 1));
2504
2505         return true;
2506 #endif
2507 }
2508                                                  
2509 /**
2510  * raw_atomic_dec_unless_positive() - atomic decrement unless positive with full ordering
2511  * @v: pointer to atomic_t
2512  *
2513  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
2514  * Otherwise, @v is not modified and relaxed ordering is provided.
2515  *
2516  * Safe to use in noinstr code; prefer atomic_dec_unless_positive() elsewhere.
2517  *
2518  * Return: @true if @v was updated, @false otherwise.
2519  */
2520 static __always_inline bool
2521 raw_atomic_dec_unless_positive(atomic_t *v)
2522 {
2523 #if defined(arch_atomic_dec_unless_positive)
2524         return arch_atomic_dec_unless_positive(v);
2525 #else
2526         int c = raw_atomic_read(v);
2527
2528         do {
2529                 if (unlikely(c > 0))
2530                         return false;
2531         } while (!raw_atomic_try_cmpxchg(v, &c, c - 1));
2532
2533         return true;
2534 #endif
2535 }
2536                                                  
2537 /**
2538  * raw_atomic_dec_if_positive() - atomic decrement if positive with full ordering
2539  * @v: pointer to atomic_t
2540  *
2541  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
2542  * Otherwise, @v is not modified and relaxed ordering is provided.
2543  *
2544  * Safe to use in noinstr code; prefer atomic_dec_if_positive() elsewhere.
2545  *
2546  * Return: The old value of (@v - 1), regardless of whether @v was updated.
2547  */
2548 static __always_inline int
2549 raw_atomic_dec_if_positive(atomic_t *v)
2550 {
2551 #if defined(arch_atomic_dec_if_positive)
2552         return arch_atomic_dec_if_positive(v);
2553 #else
2554         int dec, c = raw_atomic_read(v);
2555
2556         do {
2557                 dec = c - 1;
2558                 if (unlikely(dec < 0))
2559                         break;
2560         } while (!raw_atomic_try_cmpxchg(v, &c, dec));
2561
2562         return dec;
2563 #endif
2564 }
2565                                                  
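/*
 * Usage sketch (editorial, hedged): a semaphore-like "try to take one unit"
 * built on raw_atomic_dec_if_positive(), which only decrements while the
 * count stays non-negative. The function name is hypothetical.
 */
static __always_inline bool example_try_take(atomic_t *available)
{
        /* the return value is the would-be new count; >= 0 means we took a unit */
        return raw_atomic_dec_if_positive(available) >= 0;
}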
2566 #ifdef CONFIG_GENERIC_ATOMIC64                   
2567 #include <asm-generic/atomic64.h>                
2568 #endif                                           
2569                                                  
2570 /**
2571  * raw_atomic64_read() - atomic load with relaxed ordering
2572  * @v: pointer to atomic64_t
2573  *
2574  * Atomically loads the value of @v with relaxed ordering.
2575  *
2576  * Safe to use in noinstr code; prefer atomic64_read() elsewhere.
2577  *
2578  * Return: The value loaded from @v.
2579  */
2580 static __always_inline s64
2581 raw_atomic64_read(const atomic64_t *v)
2582 {
2583         return arch_atomic64_read(v);
2584 }
2585                                                  
2586 /**
2587  * raw_atomic64_read_acquire() - atomic load with acquire ordering
2588  * @v: pointer to atomic64_t
2589  *
2590  * Atomically loads the value of @v with acquire ordering.
2591  *
2592  * Safe to use in noinstr code; prefer atomic64_read_acquire() elsewhere.
2593  *
2594  * Return: The value loaded from @v.
2595  */
2596 static __always_inline s64
2597 raw_atomic64_read_acquire(const atomic64_t *v)
2598 {
2599 #if defined(arch_atomic64_read_acquire)
2600         return arch_atomic64_read_acquire(v);
2601 #else
2602         s64 ret;
2603
2604         if (__native_word(atomic64_t)) {
2605                 ret = smp_load_acquire(&(v)->counter);
2606         } else {
2607                 ret = raw_atomic64_read(v);
2608                 __atomic_acquire_fence();
2609         }
2610
2611         return ret;
2612 #endif
2613 }
2614                                                  
2615 /**
2616  * raw_atomic64_set() - atomic set with relaxed ordering
2617  * @v: pointer to atomic64_t
2618  * @i: s64 value to assign
2619  *
2620  * Atomically sets @v to @i with relaxed ordering.
2621  *
2622  * Safe to use in noinstr code; prefer atomic64_set() elsewhere.
2623  *
2624  * Return: Nothing.
2625  */
2626 static __always_inline void
2627 raw_atomic64_set(atomic64_t *v, s64 i)
2628 {
2629         arch_atomic64_set(v, i);
2630 }
2631                                                  
2632 /**
2633  * raw_atomic64_set_release() - atomic set with release ordering
2634  * @v: pointer to atomic64_t
2635  * @i: s64 value to assign
2636  *
2637  * Atomically sets @v to @i with release ordering.
2638  *
2639  * Safe to use in noinstr code; prefer atomic64_set_release() elsewhere.
2640  *
2641  * Return: Nothing.
2642  */
2643 static __always_inline void
2644 raw_atomic64_set_release(atomic64_t *v, s64 i)
2645 {
2646 #if defined(arch_atomic64_set_release)
2647         arch_atomic64_set_release(v, i);
2648 #else
2649         if (__native_word(atomic64_t)) {
2650                 smp_store_release(&(v)->counter, i);
2651         } else {
2652                 __atomic_release_fence();
2653                 raw_atomic64_set(v, i);
2654         }
2655 #endif
2656 }
2657                                                  
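/*
 * Usage sketch (editorial, hedged): the release/acquire pairing these two
 * helpers are meant for. The writer publishes with raw_atomic64_set_release()
 * after filling in the payload; the reader may touch the payload only after
 * raw_atomic64_read_acquire() has returned the new value. Names are
 * hypothetical.
 */
static __always_inline void example_publish(atomic64_t *seq, s64 next)
{
        /* stores issued before this call are ordered before the new value */
        raw_atomic64_set_release(seq, next);
}

static __always_inline s64 example_snapshot(atomic64_t *seq)
{
        /* loads issued after this call cannot be reordered before it */
        return raw_atomic64_read_acquire(seq);
}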
2658 /**                                              
2659  * raw_atomic64_add() - atomic add with relax    
2660  * @i: s64 value to add                          
2661  * @v: pointer to atomic64_t                     
2662  *                                               
2663  * Atomically updates @v to (@v + @i) with re    
2664  *                                               
2665  * Safe to use in noinstr code; prefer atomic    
2666  *                                               
2667  * Return: Nothing.                              
2668  */                                              
2669 static __always_inline void                      
2670 raw_atomic64_add(s64 i, atomic64_t *v)           
2671 {                                                
2672         arch_atomic64_add(i, v);                 
2673 }                                                
2674                                                  
2675 /**                                              
2676  * raw_atomic64_add_return() - atomic add wit    
2677  * @i: s64 value to add                          
2678  * @v: pointer to atomic64_t                     
2679  *                                               
2680  * Atomically updates @v to (@v + @i) with fu    
2681  *                                               
2682  * Safe to use in noinstr code; prefer atomic    
2683  *                                               
2684  * Return: The updated value of @v.              
2685  */                                              
2686 static __always_inline s64                       
2687 raw_atomic64_add_return(s64 i, atomic64_t *v)    
2688 {                                                
2689 #if defined(arch_atomic64_add_return)            
2690         return arch_atomic64_add_return(i, v)    
2691 #elif defined(arch_atomic64_add_return_relaxe    
2692         s64 ret;                                 
2693         __atomic_pre_full_fence();               
2694         ret = arch_atomic64_add_return_relaxe    
2695         __atomic_post_full_fence();              
2696         return ret;                              
2697 #else                                            
2698 #error "Unable to define raw_atomic64_add_ret    
2699 #endif                                           
2700 }                                                
2701                                                  
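/*
 * Usage sketch (editorial, hedged): raw_atomic64_add_return() as a fully
 * ordered running total where the caller needs the post-add value, for
 * example to test a threshold. The function name and limit semantics are
 * hypothetical.
 */
static __always_inline bool example_account(atomic64_t *total, s64 bytes, s64 limit)
{
        /* true while the running total stays within the limit */
        return raw_atomic64_add_return(bytes, total) <= limit;
}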
2702 /**                                              
2703  * raw_atomic64_add_return_acquire() - atomic    
2704  * @i: s64 value to add                          
2705  * @v: pointer to atomic64_t                     
2706  *                                               
2707  * Atomically updates @v to (@v + @i) with ac    
2708  *                                               
2709  * Safe to use in noinstr code; prefer atomic    
2710  *                                               
2711  * Return: The updated value of @v.              
2712  */                                              
2713 static __always_inline s64                       
2714 raw_atomic64_add_return_acquire(s64 i, atomic    
2715 {                                                
2716 #if defined(arch_atomic64_add_return_acquire)    
2717         return arch_atomic64_add_return_acqui    
2718 #elif defined(arch_atomic64_add_return_relaxe    
2719         s64 ret = arch_atomic64_add_return_re    
2720         __atomic_acquire_fence();                
2721         return ret;                              
2722 #elif defined(arch_atomic64_add_return)          
2723         return arch_atomic64_add_return(i, v)    
2724 #else                                            
2725 #error "Unable to define raw_atomic64_add_ret    
2726 #endif                                           
2727 }                                                
2728                                                  
2729 /**                                              
2730  * raw_atomic64_add_return_release() - atomic    
2731  * @i: s64 value to add                          
2732  * @v: pointer to atomic64_t                     
2733  *                                               
2734  * Atomically updates @v to (@v + @i) with re    
2735  *                                               
2736  * Safe to use in noinstr code; prefer atomic    
2737  *                                               
2738  * Return: The updated value of @v.              
2739  */                                              
2740 static __always_inline s64                       
2741 raw_atomic64_add_return_release(s64 i, atomic    
2742 {                                                
2743 #if defined(arch_atomic64_add_return_release)    
2744         return arch_atomic64_add_return_relea    
2745 #elif defined(arch_atomic64_add_return_relaxe    
2746         __atomic_release_fence();                
2747         return arch_atomic64_add_return_relax    
2748 #elif defined(arch_atomic64_add_return)          
2749         return arch_atomic64_add_return(i, v)    
2750 #else                                            
2751 #error "Unable to define raw_atomic64_add_ret    
2752 #endif                                           
2753 }                                                
2754                                                  
2755 /**                                              
2756  * raw_atomic64_add_return_relaxed() - atomic    
2757  * @i: s64 value to add                          
2758  * @v: pointer to atomic64_t                     
2759  *                                               
2760  * Atomically updates @v to (@v + @i) with re    
2761  *                                               
2762  * Safe to use in noinstr code; prefer atomic    
2763  *                                               
2764  * Return: The updated value of @v.              
2765  */                                              
2766 static __always_inline s64                       
2767 raw_atomic64_add_return_relaxed(s64 i, atomic    
2768 {                                                
2769 #if defined(arch_atomic64_add_return_relaxed)    
2770         return arch_atomic64_add_return_relax    
2771 #elif defined(arch_atomic64_add_return)          
2772         return arch_atomic64_add_return(i, v)    
2773 #else                                            
2774 #error "Unable to define raw_atomic64_add_ret    
2775 #endif                                           
2776 }                                                
2777                                                  
2778 /**                                              
2779  * raw_atomic64_fetch_add() - atomic add with    
2780  * @i: s64 value to add                          
2781  * @v: pointer to atomic64_t                     
2782  *                                               
2783  * Atomically updates @v to (@v + @i) with fu    
2784  *                                               
2785  * Safe to use in noinstr code; prefer atomic    
2786  *                                               
2787  * Return: The original value of @v.             
2788  */                                              
2789 static __always_inline s64                       
2790 raw_atomic64_fetch_add(s64 i, atomic64_t *v)     
2791 {                                                
2792 #if defined(arch_atomic64_fetch_add)             
2793         return arch_atomic64_fetch_add(i, v);    
2794 #elif defined(arch_atomic64_fetch_add_relaxed    
2795         s64 ret;                                 
2796         __atomic_pre_full_fence();               
2797         ret = arch_atomic64_fetch_add_relaxed    
2798         __atomic_post_full_fence();              
2799         return ret;                              
2800 #else                                            
2801 #error "Unable to define raw_atomic64_fetch_a    
2802 #endif                                           
2803 }                                                
2804                                                  
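/*
 * Usage sketch (editorial, hedged): raw_atomic64_fetch_add() handing out
 * unique, monotonically increasing identifiers; each caller receives the
 * pre-increment value. The function name is hypothetical.
 */
static __always_inline s64 example_next_id(atomic64_t *id_counter)
{
        return raw_atomic64_fetch_add(1, id_counter);
}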
2805 /**                                              
2806  * raw_atomic64_fetch_add_acquire() - atomic     
2807  * @i: s64 value to add                          
2808  * @v: pointer to atomic64_t                     
2809  *                                               
2810  * Atomically updates @v to (@v + @i) with ac    
2811  *                                               
2812  * Safe to use in noinstr code; prefer atomic    
2813  *                                               
2814  * Return: The original value of @v.             
2815  */                                              
2816 static __always_inline s64                       
2817 raw_atomic64_fetch_add_acquire(s64 i, atomic6    
2818 {                                                
2819 #if defined(arch_atomic64_fetch_add_acquire)     
2820         return arch_atomic64_fetch_add_acquir    
2821 #elif defined(arch_atomic64_fetch_add_relaxed    
2822         s64 ret = arch_atomic64_fetch_add_rel    
2823         __atomic_acquire_fence();                
2824         return ret;                              
2825 #elif defined(arch_atomic64_fetch_add)           
2826         return arch_atomic64_fetch_add(i, v);    
2827 #else                                            
2828 #error "Unable to define raw_atomic64_fetch_a    
2829 #endif                                           
2830 }                                                
2831                                                  
2832 /**                                              
2833  * raw_atomic64_fetch_add_release() - atomic     
2834  * @i: s64 value to add                          
2835  * @v: pointer to atomic64_t                     
2836  *                                               
2837  * Atomically updates @v to (@v + @i) with re    
2838  *                                               
2839  * Safe to use in noinstr code; prefer atomic    
2840  *                                               
2841  * Return: The original value of @v.             
2842  */                                              
2843 static __always_inline s64                       
2844 raw_atomic64_fetch_add_release(s64 i, atomic6    
2845 {                                                
2846 #if defined(arch_atomic64_fetch_add_release)     
2847         return arch_atomic64_fetch_add_releas    
2848 #elif defined(arch_atomic64_fetch_add_relaxed    
2849         __atomic_release_fence();                
2850         return arch_atomic64_fetch_add_relaxe    
2851 #elif defined(arch_atomic64_fetch_add)           
2852         return arch_atomic64_fetch_add(i, v);    
2853 #else                                            
2854 #error "Unable to define raw_atomic64_fetch_a    
2855 #endif                                           
2856 }                                                
2857                                                  
2858 /**                                              
2859  * raw_atomic64_fetch_add_relaxed() - atomic     
2860  * @i: s64 value to add                          
2861  * @v: pointer to atomic64_t                     
2862  *                                               
2863  * Atomically updates @v to (@v + @i) with re    
2864  *                                               
2865  * Safe to use in noinstr code; prefer atomic    
2866  *                                               
2867  * Return: The original value of @v.             
2868  */                                              
2869 static __always_inline s64                       
2870 raw_atomic64_fetch_add_relaxed(s64 i, atomic6    
2871 {                                                
2872 #if defined(arch_atomic64_fetch_add_relaxed)     
2873         return arch_atomic64_fetch_add_relaxe    
2874 #elif defined(arch_atomic64_fetch_add)           
2875         return arch_atomic64_fetch_add(i, v);    
2876 #else                                            
2877 #error "Unable to define raw_atomic64_fetch_a    
2878 #endif                                           
2879 }                                                
2880                                                  
2881 /**                                              
2882  * raw_atomic64_sub() - atomic subtract with     
2883  * @i: s64 value to subtract                     
2884  * @v: pointer to atomic64_t                     
2885  *                                               
2886  * Atomically updates @v to (@v - @i) with re    
2887  *                                               
2888  * Safe to use in noinstr code; prefer atomic    
2889  *                                               
2890  * Return: Nothing.                              
2891  */                                              
2892 static __always_inline void                      
2893 raw_atomic64_sub(s64 i, atomic64_t *v)           
2894 {                                                
2895         arch_atomic64_sub(i, v);                 
2896 }                                                
2897                                                  
2898 /**                                              
2899  * raw_atomic64_sub_return() - atomic subtrac    
2900  * @i: s64 value to subtract                     
2901  * @v: pointer to atomic64_t                     
2902  *                                               
2903  * Atomically updates @v to (@v - @i) with fu    
2904  *                                               
2905  * Safe to use in noinstr code; prefer atomic    
2906  *                                               
2907  * Return: The updated value of @v.              
2908  */                                              
2909 static __always_inline s64                       
2910 raw_atomic64_sub_return(s64 i, atomic64_t *v)    
2911 {                                                
2912 #if defined(arch_atomic64_sub_return)            
2913         return arch_atomic64_sub_return(i, v)    
2914 #elif defined(arch_atomic64_sub_return_relaxe    
2915         s64 ret;                                 
2916         __atomic_pre_full_fence();               
2917         ret = arch_atomic64_sub_return_relaxe    
2918         __atomic_post_full_fence();              
2919         return ret;                              
2920 #else                                            
2921 #error "Unable to define raw_atomic64_sub_ret    
2922 #endif                                           
2923 }                                                
2924                                                  
2925 /**                                              
2926  * raw_atomic64_sub_return_acquire() - atomic    
2927  * @i: s64 value to subtract                     
2928  * @v: pointer to atomic64_t                     
2929  *                                               
2930  * Atomically updates @v to (@v - @i) with ac    
2931  *                                               
2932  * Safe to use in noinstr code; prefer atomic    
2933  *                                               
2934  * Return: The updated value of @v.              
2935  */                                              
2936 static __always_inline s64                       
2937 raw_atomic64_sub_return_acquire(s64 i, atomic    
2938 {                                                
2939 #if defined(arch_atomic64_sub_return_acquire)    
2940         return arch_atomic64_sub_return_acqui    
2941 #elif defined(arch_atomic64_sub_return_relaxe    
2942         s64 ret = arch_atomic64_sub_return_re    
2943         __atomic_acquire_fence();                
2944         return ret;                              
2945 #elif defined(arch_atomic64_sub_return)          
2946         return arch_atomic64_sub_return(i, v)    
2947 #else                                            
2948 #error "Unable to define raw_atomic64_sub_ret    
2949 #endif                                           
2950 }                                                
2951                                                  
2952 /**                                              
2953  * raw_atomic64_sub_return_release() - atomic    
2954  * @i: s64 value to subtract                     
2955  * @v: pointer to atomic64_t                     
2956  *                                               
2957  * Atomically updates @v to (@v - @i) with re    
2958  *                                               
2959  * Safe to use in noinstr code; prefer atomic    
2960  *                                               
2961  * Return: The updated value of @v.              
2962  */                                              
2963 static __always_inline s64                       
2964 raw_atomic64_sub_return_release(s64 i, atomic    
2965 {                                                
2966 #if defined(arch_atomic64_sub_return_release)    
2967         return arch_atomic64_sub_return_relea    
2968 #elif defined(arch_atomic64_sub_return_relaxe    
2969         __atomic_release_fence();                
2970         return arch_atomic64_sub_return_relax    
2971 #elif defined(arch_atomic64_sub_return)          
2972         return arch_atomic64_sub_return(i, v)    
2973 #else                                            
2974 #error "Unable to define raw_atomic64_sub_ret    
2975 #endif                                           
2976 }                                                
2977                                                  
2978 /**                                              
2979  * raw_atomic64_sub_return_relaxed() - atomic    
2980  * @i: s64 value to subtract                     
2981  * @v: pointer to atomic64_t                     
2982  *                                               
2983  * Atomically updates @v to (@v - @i) with re    
2984  *                                               
2985  * Safe to use in noinstr code; prefer atomic    
2986  *                                               
2987  * Return: The updated value of @v.              
2988  */                                              
2989 static __always_inline s64                       
2990 raw_atomic64_sub_return_relaxed(s64 i, atomic    
2991 {                                                
2992 #if defined(arch_atomic64_sub_return_relaxed)    
2993         return arch_atomic64_sub_return_relax    
2994 #elif defined(arch_atomic64_sub_return)          
2995         return arch_atomic64_sub_return(i, v)    
2996 #else                                            
2997 #error "Unable to define raw_atomic64_sub_ret    
2998 #endif                                           
2999 }                                                
3000                                                  
3001 /**                                              
3002  * raw_atomic64_fetch_sub() - atomic subtract    
3003  * @i: s64 value to subtract                     
3004  * @v: pointer to atomic64_t                     
3005  *                                               
3006  * Atomically updates @v to (@v - @i) with fu    
3007  *                                               
3008  * Safe to use in noinstr code; prefer atomic    
3009  *                                               
3010  * Return: The original value of @v.             
3011  */                                              
3012 static __always_inline s64                       
3013 raw_atomic64_fetch_sub(s64 i, atomic64_t *v)     
3014 {                                                
3015 #if defined(arch_atomic64_fetch_sub)             
3016         return arch_atomic64_fetch_sub(i, v);    
3017 #elif defined(arch_atomic64_fetch_sub_relaxed    
3018         s64 ret;                                 
3019         __atomic_pre_full_fence();               
3020         ret = arch_atomic64_fetch_sub_relaxed    
3021         __atomic_post_full_fence();              
3022         return ret;                              
3023 #else                                            
3024 #error "Unable to define raw_atomic64_fetch_s    
3025 #endif                                           
3026 }                                                
3027                                                  
3028 /**                                              
3029  * raw_atomic64_fetch_sub_acquire() - atomic     
3030  * @i: s64 value to subtract                     
3031  * @v: pointer to atomic64_t                     
3032  *                                               
3033  * Atomically updates @v to (@v - @i) with ac    
3034  *                                               
3035  * Safe to use in noinstr code; prefer atomic    
3036  *                                               
3037  * Return: The original value of @v.             
3038  */                                              
3039 static __always_inline s64                       
3040 raw_atomic64_fetch_sub_acquire(s64 i, atomic6    
3041 {                                                
3042 #if defined(arch_atomic64_fetch_sub_acquire)     
3043         return arch_atomic64_fetch_sub_acquir    
3044 #elif defined(arch_atomic64_fetch_sub_relaxed    
3045         s64 ret = arch_atomic64_fetch_sub_rel    
3046         __atomic_acquire_fence();                
3047         return ret;                              
3048 #elif defined(arch_atomic64_fetch_sub)           
3049         return arch_atomic64_fetch_sub(i, v);    
3050 #else                                            
3051 #error "Unable to define raw_atomic64_fetch_s    
3052 #endif                                           
3053 }                                                
3054                                                  
3055 /**                                              
3056  * raw_atomic64_fetch_sub_release() - atomic     
3057  * @i: s64 value to subtract                     
3058  * @v: pointer to atomic64_t                     
3059  *                                               
3060  * Atomically updates @v to (@v - @i) with re    
3061  *                                               
3062  * Safe to use in noinstr code; prefer atomic    
3063  *                                               
3064  * Return: The original value of @v.             
3065  */                                              
3066 static __always_inline s64                       
3067 raw_atomic64_fetch_sub_release(s64 i, atomic6    
3068 {                                                
3069 #if defined(arch_atomic64_fetch_sub_release)     
3070         return arch_atomic64_fetch_sub_releas    
3071 #elif defined(arch_atomic64_fetch_sub_relaxed    
3072         __atomic_release_fence();                
3073         return arch_atomic64_fetch_sub_relaxe    
3074 #elif defined(arch_atomic64_fetch_sub)           
3075         return arch_atomic64_fetch_sub(i, v);    
3076 #else                                            
3077 #error "Unable to define raw_atomic64_fetch_s    
3078 #endif                                           
3079 }                                                
3080                                                  
3081 /**                                              
3082  * raw_atomic64_fetch_sub_relaxed() - atomic subtract with relaxed ordering
3083  * @i: s64 value to subtract
3084  * @v: pointer to atomic64_t
3085  *
3086  * Atomically updates @v to (@v - @i) with relaxed ordering.
3087  *
3088  * Safe to use in noinstr code; prefer atomic64_fetch_sub_relaxed() elsewhere.
3089  *                                               
3090  * Return: The original value of @v.             
3091  */                                              
3092 static __always_inline s64                       
3093 raw_atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
3094 {
3095 #if defined(arch_atomic64_fetch_sub_relaxed)
3096         return arch_atomic64_fetch_sub_relaxed(i, v);
3097 #elif defined(arch_atomic64_fetch_sub)
3098         return arch_atomic64_fetch_sub(i, v);
3099 #else
3100 #error "Unable to define raw_atomic64_fetch_sub_relaxed"
3101 #endif                                           
3102 }                                                
3103                                                  
3104 /**                                              
3105  * raw_atomic64_inc() - atomic increment with relaxed ordering
3106  * @v: pointer to atomic64_t
3107  *
3108  * Atomically updates @v to (@v + 1) with relaxed ordering.
3109  *
3110  * Safe to use in noinstr code; prefer atomic64_inc() elsewhere.
3111  *                                               
3112  * Return: Nothing.                              
3113  */                                              
3114 static __always_inline void                      
3115 raw_atomic64_inc(atomic64_t *v)                  
3116 {                                                
3117 #if defined(arch_atomic64_inc)                   
3118         arch_atomic64_inc(v);                    
3119 #else                                            
3120         raw_atomic64_add(1, v);                  
3121 #endif                                           
3122 }                                                
3123                                                  
3124 /**                                              
3125  * raw_atomic64_inc_return() - atomic increment with full ordering
3126  * @v: pointer to atomic64_t
3127  *
3128  * Atomically updates @v to (@v + 1) with full ordering.
3129  *
3130  * Safe to use in noinstr code; prefer atomic64_inc_return() elsewhere.
3131  *                                               
3132  * Return: The updated value of @v.              
3133  */                                              
3134 static __always_inline s64                       
3135 raw_atomic64_inc_return(atomic64_t *v)           
3136 {                                                
3137 #if defined(arch_atomic64_inc_return)            
3138         return arch_atomic64_inc_return(v);      
3139 #elif defined(arch_atomic64_inc_return_relaxed)
3140         s64 ret;
3141         __atomic_pre_full_fence();
3142         ret = arch_atomic64_inc_return_relaxed(v);
3143         __atomic_post_full_fence();              
3144         return ret;                              
3145 #else                                            
3146         return raw_atomic64_add_return(1, v);    
3147 #endif                                           
3148 }                                                
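
/*
 * Usage sketch under stated assumptions (not from this file): since
 * inc_return() hands back the updated value with full ordering, it can
 * serve as a simple unique-ID allocator. "sketch_next_id" and
 * "sketch_alloc_id" are hypothetical names; prefer the instrumented
 * atomic64_inc_return() outside noinstr code, as noted above.
 */
#include <linux/atomic.h>

static atomic64_t sketch_next_id = ATOMIC64_INIT(0);

static inline s64 sketch_alloc_id(void)
{
	/* every caller observes a distinct, monotonically increasing value */
	return atomic64_inc_return(&sketch_next_id);
}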
3149                                                  
3150 /**                                              
3151  * raw_atomic64_inc_return_acquire() - atomic increment with acquire ordering
3152  * @v: pointer to atomic64_t
3153  *
3154  * Atomically updates @v to (@v + 1) with acquire ordering.
3155  *
3156  * Safe to use in noinstr code; prefer atomic64_inc_return_acquire() elsewhere.
3157  *                                               
3158  * Return: The updated value of @v.              
3159  */                                              
3160 static __always_inline s64                       
3161 raw_atomic64_inc_return_acquire(atomic64_t *v)
3162 {
3163 #if defined(arch_atomic64_inc_return_acquire)
3164         return arch_atomic64_inc_return_acquire(v);
3165 #elif defined(arch_atomic64_inc_return_relaxed)
3166         s64 ret = arch_atomic64_inc_return_relaxed(v);
3167         __atomic_acquire_fence();
3168         return ret;
3169 #elif defined(arch_atomic64_inc_return)
3170         return arch_atomic64_inc_return(v);
3171 #else
3172         return raw_atomic64_add_return_acquire(1, v);
3173 #endif                                           
3174 }                                                
3175                                                  
3176 /**                                              
3177  * raw_atomic64_inc_return_release() - atomic increment with release ordering
3178  * @v: pointer to atomic64_t
3179  *
3180  * Atomically updates @v to (@v + 1) with release ordering.
3181  *
3182  * Safe to use in noinstr code; prefer atomic64_inc_return_release() elsewhere.
3183  *                                               
3184  * Return: The updated value of @v.              
3185  */                                              
3186 static __always_inline s64                       
3187 raw_atomic64_inc_return_release(atomic64_t *v)
3188 {
3189 #if defined(arch_atomic64_inc_return_release)
3190         return arch_atomic64_inc_return_release(v);
3191 #elif defined(arch_atomic64_inc_return_relaxed)
3192         __atomic_release_fence();
3193         return arch_atomic64_inc_return_relaxed(v);
3194 #elif defined(arch_atomic64_inc_return)
3195         return arch_atomic64_inc_return(v);
3196 #else
3197         return raw_atomic64_add_return_release(1, v);
3198 #endif                                           
3199 }                                                
3200                                                  
3201 /**                                              
3202  * raw_atomic64_inc_return_relaxed() - atomic increment with relaxed ordering
3203  * @v: pointer to atomic64_t
3204  *
3205  * Atomically updates @v to (@v + 1) with relaxed ordering.
3206  *
3207  * Safe to use in noinstr code; prefer atomic64_inc_return_relaxed() elsewhere.
3208  *                                               
3209  * Return: The updated value of @v.              
3210  */                                              
3211 static __always_inline s64                       
3212 raw_atomic64_inc_return_relaxed(atomic64_t *v)
3213 {
3214 #if defined(arch_atomic64_inc_return_relaxed)
3215         return arch_atomic64_inc_return_relaxed(v);
3216 #elif defined(arch_atomic64_inc_return)
3217         return arch_atomic64_inc_return(v);
3218 #else
3219         return raw_atomic64_add_return_relaxed(1, v);
3220 #endif                                           
3221 }                                                
3222                                                  
3223 /**                                              
3224  * raw_atomic64_fetch_inc() - atomic increment with full ordering
3225  * @v: pointer to atomic64_t
3226  *
3227  * Atomically updates @v to (@v + 1) with full ordering.
3228  *
3229  * Safe to use in noinstr code; prefer atomic64_fetch_inc() elsewhere.
3230  *                                               
3231  * Return: The original value of @v.             
3232  */                                              
3233 static __always_inline s64                       
3234 raw_atomic64_fetch_inc(atomic64_t *v)            
3235 {                                                
3236 #if defined(arch_atomic64_fetch_inc)             
3237         return arch_atomic64_fetch_inc(v);       
3238 #elif defined(arch_atomic64_fetch_inc_relaxed)
3239         s64 ret;
3240         __atomic_pre_full_fence();
3241         ret = arch_atomic64_fetch_inc_relaxed(v);
3242         __atomic_post_full_fence();              
3243         return ret;                              
3244 #else                                            
3245         return raw_atomic64_fetch_add(1, v);     
3246 #endif                                           
3247 }                                                
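
/*
 * Sketch of the fetch_inc()/inc_return() distinction (illustrative only,
 * hypothetical helper): fetch_inc() returns the value before the
 * increment, inc_return() the value after it.
 */
#include <linux/atomic.h>
#include <linux/bug.h>

static inline void sketch_fetch_vs_return(atomic64_t *v)
{
	s64 old = atomic64_fetch_inc(v);	/* pre-increment value */
	s64 new = atomic64_inc_return(v);	/* post-increment value */

	/* two increments have been applied, so the results differ by two */
	WARN_ON(new != old + 2);
}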
3248                                                  
3249 /**                                              
3250  * raw_atomic64_fetch_inc_acquire() - atomic increment with acquire ordering
3251  * @v: pointer to atomic64_t
3252  *
3253  * Atomically updates @v to (@v + 1) with acquire ordering.
3254  *
3255  * Safe to use in noinstr code; prefer atomic64_fetch_inc_acquire() elsewhere.
3256  *                                               
3257  * Return: The original value of @v.             
3258  */                                              
3259 static __always_inline s64                       
3260 raw_atomic64_fetch_inc_acquire(atomic64_t *v)    
3261 {                                                
3262 #if defined(arch_atomic64_fetch_inc_acquire)     
3263         return arch_atomic64_fetch_inc_acquire(v);
3264 #elif defined(arch_atomic64_fetch_inc_relaxed)
3265         s64 ret = arch_atomic64_fetch_inc_relaxed(v);
3266         __atomic_acquire_fence();
3267         return ret;
3268 #elif defined(arch_atomic64_fetch_inc)
3269         return arch_atomic64_fetch_inc(v);
3270 #else
3271         return raw_atomic64_fetch_add_acquire(1, v);
3272 #endif                                           
3273 }                                                
3274                                                  
3275 /**                                              
3276  * raw_atomic64_fetch_inc_release() - atomic increment with release ordering
3277  * @v: pointer to atomic64_t
3278  *
3279  * Atomically updates @v to (@v + 1) with release ordering.
3280  *
3281  * Safe to use in noinstr code; prefer atomic64_fetch_inc_release() elsewhere.
3282  *                                               
3283  * Return: The original value of @v.             
3284  */                                              
3285 static __always_inline s64                       
3286 raw_atomic64_fetch_inc_release(atomic64_t *v)    
3287 {                                                
3288 #if defined(arch_atomic64_fetch_inc_release)     
3289         return arch_atomic64_fetch_inc_release(v);
3290 #elif defined(arch_atomic64_fetch_inc_relaxed)
3291         __atomic_release_fence();
3292         return arch_atomic64_fetch_inc_relaxed(v);
3293 #elif defined(arch_atomic64_fetch_inc)
3294         return arch_atomic64_fetch_inc(v);
3295 #else
3296         return raw_atomic64_fetch_add_release(1, v);
3297 #endif                                           
3298 }                                                
3299                                                  
3300 /**                                              
3301  * raw_atomic64_fetch_inc_relaxed() - atomic increment with relaxed ordering
3302  * @v: pointer to atomic64_t
3303  *
3304  * Atomically updates @v to (@v + 1) with relaxed ordering.
3305  *
3306  * Safe to use in noinstr code; prefer atomic64_fetch_inc_relaxed() elsewhere.
3307  *                                               
3308  * Return: The original value of @v.             
3309  */                                              
3310 static __always_inline s64                       
3311 raw_atomic64_fetch_inc_relaxed(atomic64_t *v)    
3312 {                                                
3313 #if defined(arch_atomic64_fetch_inc_relaxed)     
3314         return arch_atomic64_fetch_inc_relaxed(v);
3315 #elif defined(arch_atomic64_fetch_inc)
3316         return arch_atomic64_fetch_inc(v);
3317 #else
3318         return raw_atomic64_fetch_add_relaxed(1, v);
3319 #endif                                           
3320 }                                                
3321                                                  
3322 /**                                              
3323  * raw_atomic64_dec() - atomic decrement with relaxed ordering
3324  * @v: pointer to atomic64_t
3325  *
3326  * Atomically updates @v to (@v - 1) with relaxed ordering.
3327  *
3328  * Safe to use in noinstr code; prefer atomic64_dec() elsewhere.
3329  *                                               
3330  * Return: Nothing.                              
3331  */                                              
3332 static __always_inline void                      
3333 raw_atomic64_dec(atomic64_t *v)                  
3334 {                                                
3335 #if defined(arch_atomic64_dec)                   
3336         arch_atomic64_dec(v);                    
3337 #else                                            
3338         raw_atomic64_sub(1, v);                  
3339 #endif                                           
3340 }                                                
3341                                                  
3342 /**                                              
3343  * raw_atomic64_dec_return() - atomic decrement with full ordering
3344  * @v: pointer to atomic64_t
3345  *
3346  * Atomically updates @v to (@v - 1) with full ordering.
3347  *
3348  * Safe to use in noinstr code; prefer atomic64_dec_return() elsewhere.
3349  *                                               
3350  * Return: The updated value of @v.              
3351  */                                              
3352 static __always_inline s64                       
3353 raw_atomic64_dec_return(atomic64_t *v)           
3354 {                                                
3355 #if defined(arch_atomic64_dec_return)            
3356         return arch_atomic64_dec_return(v);      
3357 #elif defined(arch_atomic64_dec_return_relaxed)
3358         s64 ret;
3359         __atomic_pre_full_fence();
3360         ret = arch_atomic64_dec_return_relaxed(v);
3361         __atomic_post_full_fence();              
3362         return ret;                              
3363 #else                                            
3364         return raw_atomic64_sub_return(1, v);    
3365 #endif                                           
3366 }                                                
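
/*
 * Usage sketch under stated assumptions: a dec_return() that drops to zero
 * identifies the last user of an object. "struct sketch_obj" and
 * "sketch_obj_release" are hypothetical; real reference counts should
 * normally use refcount_t rather than bare atomics.
 */
#include <linux/atomic.h>

struct sketch_obj {
	atomic64_t refs;
};

void sketch_obj_release(struct sketch_obj *obj);	/* hypothetical, defined elsewhere */

static inline void sketch_obj_put(struct sketch_obj *obj)
{
	/* full ordering makes earlier accesses visible before the release */
	if (atomic64_dec_return(&obj->refs) == 0)
		sketch_obj_release(obj);
}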
3367                                                  
3368 /**                                              
3369  * raw_atomic64_dec_return_acquire() - atomic decrement with acquire ordering
3370  * @v: pointer to atomic64_t
3371  *
3372  * Atomically updates @v to (@v - 1) with acquire ordering.
3373  *
3374  * Safe to use in noinstr code; prefer atomic64_dec_return_acquire() elsewhere.
3375  *                                               
3376  * Return: The updated value of @v.              
3377  */                                              
3378 static __always_inline s64                       
3379 raw_atomic64_dec_return_acquire(atomic64_t *v)
3380 {
3381 #if defined(arch_atomic64_dec_return_acquire)
3382         return arch_atomic64_dec_return_acquire(v);
3383 #elif defined(arch_atomic64_dec_return_relaxed)
3384         s64 ret = arch_atomic64_dec_return_relaxed(v);
3385         __atomic_acquire_fence();
3386         return ret;
3387 #elif defined(arch_atomic64_dec_return)
3388         return arch_atomic64_dec_return(v);
3389 #else
3390         return raw_atomic64_sub_return_acquire(1, v);
3391 #endif                                           
3392 }                                                
3393                                                  
3394 /**                                              
3395  * raw_atomic64_dec_return_release() - atomic decrement with release ordering
3396  * @v: pointer to atomic64_t
3397  *
3398  * Atomically updates @v to (@v - 1) with release ordering.
3399  *
3400  * Safe to use in noinstr code; prefer atomic64_dec_return_release() elsewhere.
3401  *                                               
3402  * Return: The updated value of @v.              
3403  */                                              
3404 static __always_inline s64                       
3405 raw_atomic64_dec_return_release(atomic64_t *v)
3406 {
3407 #if defined(arch_atomic64_dec_return_release)
3408         return arch_atomic64_dec_return_release(v);
3409 #elif defined(arch_atomic64_dec_return_relaxed)
3410         __atomic_release_fence();
3411         return arch_atomic64_dec_return_relaxed(v);
3412 #elif defined(arch_atomic64_dec_return)
3413         return arch_atomic64_dec_return(v);
3414 #else
3415         return raw_atomic64_sub_return_release(1, v);
3416 #endif                                           
3417 }                                                
3418                                                  
3419 /**                                              
3420  * raw_atomic64_dec_return_relaxed() - atomic decrement with relaxed ordering
3421  * @v: pointer to atomic64_t
3422  *
3423  * Atomically updates @v to (@v - 1) with relaxed ordering.
3424  *
3425  * Safe to use in noinstr code; prefer atomic64_dec_return_relaxed() elsewhere.
3426  *                                               
3427  * Return: The updated value of @v.              
3428  */                                              
3429 static __always_inline s64                       
3430 raw_atomic64_dec_return_relaxed(atomic64_t *v)
3431 {
3432 #if defined(arch_atomic64_dec_return_relaxed)
3433         return arch_atomic64_dec_return_relaxed(v);
3434 #elif defined(arch_atomic64_dec_return)
3435         return arch_atomic64_dec_return(v);
3436 #else
3437         return raw_atomic64_sub_return_relaxed(1, v);
3438 #endif                                           
3439 }                                                
3440                                                  
3441 /**                                              
3442  * raw_atomic64_fetch_dec() - atomic decrement with full ordering
3443  * @v: pointer to atomic64_t
3444  *
3445  * Atomically updates @v to (@v - 1) with full ordering.
3446  *
3447  * Safe to use in noinstr code; prefer atomic64_fetch_dec() elsewhere.
3448  *                                               
3449  * Return: The original value of @v.             
3450  */                                              
3451 static __always_inline s64                       
3452 raw_atomic64_fetch_dec(atomic64_t *v)            
3453 {                                                
3454 #if defined(arch_atomic64_fetch_dec)             
3455         return arch_atomic64_fetch_dec(v);       
3456 #elif defined(arch_atomic64_fetch_dec_relaxed)
3457         s64 ret;
3458         __atomic_pre_full_fence();
3459         ret = arch_atomic64_fetch_dec_relaxed(v);
3460         __atomic_post_full_fence();              
3461         return ret;                              
3462 #else                                            
3463         return raw_atomic64_fetch_sub(1, v);     
3464 #endif                                           
3465 }                                                
3466                                                  
3467 /**                                              
3468  * raw_atomic64_fetch_dec_acquire() - atomic decrement with acquire ordering
3469  * @v: pointer to atomic64_t
3470  *
3471  * Atomically updates @v to (@v - 1) with acquire ordering.
3472  *
3473  * Safe to use in noinstr code; prefer atomic64_fetch_dec_acquire() elsewhere.
3474  *                                               
3475  * Return: The original value of @v.             
3476  */                                              
3477 static __always_inline s64                       
3478 raw_atomic64_fetch_dec_acquire(atomic64_t *v)    
3479 {                                                
3480 #if defined(arch_atomic64_fetch_dec_acquire)     
3481         return arch_atomic64_fetch_dec_acquire(v);
3482 #elif defined(arch_atomic64_fetch_dec_relaxed)
3483         s64 ret = arch_atomic64_fetch_dec_relaxed(v);
3484         __atomic_acquire_fence();
3485         return ret;
3486 #elif defined(arch_atomic64_fetch_dec)
3487         return arch_atomic64_fetch_dec(v);
3488 #else
3489         return raw_atomic64_fetch_sub_acquire(1, v);
3490 #endif                                           
3491 }                                                
3492                                                  
3493 /**                                              
3494  * raw_atomic64_fetch_dec_release() - atomic decrement with release ordering
3495  * @v: pointer to atomic64_t
3496  *
3497  * Atomically updates @v to (@v - 1) with release ordering.
3498  *
3499  * Safe to use in noinstr code; prefer atomic64_fetch_dec_release() elsewhere.
3500  *                                               
3501  * Return: The original value of @v.             
3502  */                                              
3503 static __always_inline s64                       
3504 raw_atomic64_fetch_dec_release(atomic64_t *v)    
3505 {                                                
3506 #if defined(arch_atomic64_fetch_dec_release)     
3507         return arch_atomic64_fetch_dec_release(v);
3508 #elif defined(arch_atomic64_fetch_dec_relaxed)
3509         __atomic_release_fence();
3510         return arch_atomic64_fetch_dec_relaxed(v);
3511 #elif defined(arch_atomic64_fetch_dec)
3512         return arch_atomic64_fetch_dec(v);
3513 #else
3514         return raw_atomic64_fetch_sub_release(1, v);
3515 #endif                                           
3516 }                                                
3517                                                  
3518 /**                                              
3519  * raw_atomic64_fetch_dec_relaxed() - atomic decrement with relaxed ordering
3520  * @v: pointer to atomic64_t
3521  *
3522  * Atomically updates @v to (@v - 1) with relaxed ordering.
3523  *
3524  * Safe to use in noinstr code; prefer atomic64_fetch_dec_relaxed() elsewhere.
3525  *                                               
3526  * Return: The original value of @v.             
3527  */                                              
3528 static __always_inline s64                       
3529 raw_atomic64_fetch_dec_relaxed(atomic64_t *v)    
3530 {                                                
3531 #if defined(arch_atomic64_fetch_dec_relaxed)     
3532         return arch_atomic64_fetch_dec_relaxed(v);
3533 #elif defined(arch_atomic64_fetch_dec)
3534         return arch_atomic64_fetch_dec(v);
3535 #else
3536         return raw_atomic64_fetch_sub_relaxed(1, v);
3537 #endif                                           
3538 }                                                
3539                                                  
3540 /**                                              
3541  * raw_atomic64_and() - atomic bitwise AND with relaxed ordering
3542  * @i: s64 value
3543  * @v: pointer to atomic64_t
3544  *
3545  * Atomically updates @v to (@v & @i) with relaxed ordering.
3546  *
3547  * Safe to use in noinstr code; prefer atomic64_and() elsewhere.
3548  *                                               
3549  * Return: Nothing.                              
3550  */                                              
3551 static __always_inline void                      
3552 raw_atomic64_and(s64 i, atomic64_t *v)           
3553 {                                                
3554         arch_atomic64_and(i, v);                 
3555 }                                                
3556                                                  
3557 /**                                              
3558  * raw_atomic64_fetch_and() - atomic bitwise AND with full ordering
3559  * @i: s64 value
3560  * @v: pointer to atomic64_t
3561  *
3562  * Atomically updates @v to (@v & @i) with full ordering.
3563  *
3564  * Safe to use in noinstr code; prefer atomic64_fetch_and() elsewhere.
3565  *                                               
3566  * Return: The original value of @v.             
3567  */                                              
3568 static __always_inline s64                       
3569 raw_atomic64_fetch_and(s64 i, atomic64_t *v)     
3570 {                                                
3571 #if defined(arch_atomic64_fetch_and)             
3572         return arch_atomic64_fetch_and(i, v);    
3573 #elif defined(arch_atomic64_fetch_and_relaxed)
3574         s64 ret;
3575         __atomic_pre_full_fence();
3576         ret = arch_atomic64_fetch_and_relaxed(i, v);
3577         __atomic_post_full_fence();
3578         return ret;
3579 #else
3580 #error "Unable to define raw_atomic64_fetch_and"
3581 #endif                                           
3582 }                                                
3583                                                  
3584 /**                                              
3585  * raw_atomic64_fetch_and_acquire() - atomic bitwise AND with acquire ordering
3586  * @i: s64 value
3587  * @v: pointer to atomic64_t
3588  *
3589  * Atomically updates @v to (@v & @i) with acquire ordering.
3590  *
3591  * Safe to use in noinstr code; prefer atomic64_fetch_and_acquire() elsewhere.
3592  *                                               
3593  * Return: The original value of @v.             
3594  */                                              
3595 static __always_inline s64                       
3596 raw_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
3597 {
3598 #if defined(arch_atomic64_fetch_and_acquire)
3599         return arch_atomic64_fetch_and_acquire(i, v);
3600 #elif defined(arch_atomic64_fetch_and_relaxed)
3601         s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
3602         __atomic_acquire_fence();
3603         return ret;
3604 #elif defined(arch_atomic64_fetch_and)
3605         return arch_atomic64_fetch_and(i, v);
3606 #else
3607 #error "Unable to define raw_atomic64_fetch_and_acquire"
3608 #endif                                           
3609 }                                                
3610                                                  
3611 /**                                              
3612  * raw_atomic64_fetch_and_release() - atomic bitwise AND with release ordering
3613  * @i: s64 value
3614  * @v: pointer to atomic64_t
3615  *
3616  * Atomically updates @v to (@v & @i) with release ordering.
3617  *
3618  * Safe to use in noinstr code; prefer atomic64_fetch_and_release() elsewhere.
3619  *                                               
3620  * Return: The original value of @v.             
3621  */                                              
3622 static __always_inline s64                       
3623 raw_atomic64_fetch_and_release(s64 i, atomic64_t *v)
3624 {
3625 #if defined(arch_atomic64_fetch_and_release)
3626         return arch_atomic64_fetch_and_release(i, v);
3627 #elif defined(arch_atomic64_fetch_and_relaxed)
3628         __atomic_release_fence();
3629         return arch_atomic64_fetch_and_relaxed(i, v);
3630 #elif defined(arch_atomic64_fetch_and)
3631         return arch_atomic64_fetch_and(i, v);
3632 #else
3633 #error "Unable to define raw_atomic64_fetch_and_release"
3634 #endif                                           
3635 }                                                
3636                                                  
3637 /**                                              
3638  * raw_atomic64_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
3639  * @i: s64 value
3640  * @v: pointer to atomic64_t
3641  *
3642  * Atomically updates @v to (@v & @i) with relaxed ordering.
3643  *
3644  * Safe to use in noinstr code; prefer atomic64_fetch_and_relaxed() elsewhere.
3645  *                                               
3646  * Return: The original value of @v.             
3647  */                                              
3648 static __always_inline s64                       
3649 raw_atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
3650 {
3651 #if defined(arch_atomic64_fetch_and_relaxed)
3652         return arch_atomic64_fetch_and_relaxed(i, v);
3653 #elif defined(arch_atomic64_fetch_and)
3654         return arch_atomic64_fetch_and(i, v);
3655 #else
3656 #error "Unable to define raw_atomic64_fetch_and_relaxed"
3657 #endif                                           
3658 }                                                
3659                                                  
3660 /**                                              
3661  * raw_atomic64_andnot() - atomic bitwise AND NOT with relaxed ordering
3662  * @i: s64 value
3663  * @v: pointer to atomic64_t
3664  *
3665  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
3666  *
3667  * Safe to use in noinstr code; prefer atomic64_andnot() elsewhere.
3668  *                                               
3669  * Return: Nothing.                              
3670  */                                              
3671 static __always_inline void                      
3672 raw_atomic64_andnot(s64 i, atomic64_t *v)        
3673 {                                                
3674 #if defined(arch_atomic64_andnot)                
3675         arch_atomic64_andnot(i, v);              
3676 #else                                            
3677         raw_atomic64_and(~i, v);                 
3678 #endif                                           
3679 }                                                
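
/*
 * Usage sketch under stated assumptions: andnot clears the bits set in @i,
 * and the fetch_ variant additionally reports the old value, so a caller
 * can tell whether a flag was still set before being cleared.
 * SKETCH_FLAG_BUSY is a hypothetical bit.
 */
#include <linux/atomic.h>
#include <linux/bits.h>

#define SKETCH_FLAG_BUSY	BIT_ULL(0)

static inline bool sketch_clear_busy(atomic64_t *state)
{
	/* true if the BUSY bit was set immediately before it was cleared */
	return atomic64_fetch_andnot(SKETCH_FLAG_BUSY, state) & SKETCH_FLAG_BUSY;
}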
3680                                                  
3681 /**                                              
3682  * raw_atomic64_fetch_andnot() - atomic bitwise AND NOT with full ordering
3683  * @i: s64 value
3684  * @v: pointer to atomic64_t
3685  *
3686  * Atomically updates @v to (@v & ~@i) with full ordering.
3687  *
3688  * Safe to use in noinstr code; prefer atomic64_fetch_andnot() elsewhere.
3689  *                                               
3690  * Return: The original value of @v.             
3691  */                                              
3692 static __always_inline s64                       
3693 raw_atomic64_fetch_andnot(s64 i, atomic64_t *v)
3694 {
3695 #if defined(arch_atomic64_fetch_andnot)
3696         return arch_atomic64_fetch_andnot(i, v);
3697 #elif defined(arch_atomic64_fetch_andnot_relaxed)
3698         s64 ret;
3699         __atomic_pre_full_fence();
3700         ret = arch_atomic64_fetch_andnot_relaxed(i, v);
3701         __atomic_post_full_fence();              
3702         return ret;                              
3703 #else                                            
3704         return raw_atomic64_fetch_and(~i, v);    
3705 #endif                                           
3706 }                                                
3707                                                  
3708 /**                                              
3709  * raw_atomic64_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
3710  * @i: s64 value
3711  * @v: pointer to atomic64_t
3712  *
3713  * Atomically updates @v to (@v & ~@i) with acquire ordering.
3714  *
3715  * Safe to use in noinstr code; prefer atomic64_fetch_andnot_acquire() elsewhere.
3716  *                                               
3717  * Return: The original value of @v.             
3718  */                                              
3719 static __always_inline s64                       
3720 raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
3721 {
3722 #if defined(arch_atomic64_fetch_andnot_acquire)
3723         return arch_atomic64_fetch_andnot_acquire(i, v);
3724 #elif defined(arch_atomic64_fetch_andnot_relaxed)
3725         s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
3726         __atomic_acquire_fence();
3727         return ret;
3728 #elif defined(arch_atomic64_fetch_andnot)
3729         return arch_atomic64_fetch_andnot(i, v);
3730 #else
3731         return raw_atomic64_fetch_and_acquire(~i, v);
3732 #endif                                           
3733 }                                                
3734                                                  
3735 /**                                              
3736  * raw_atomic64_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
3737  * @i: s64 value
3738  * @v: pointer to atomic64_t
3739  *
3740  * Atomically updates @v to (@v & ~@i) with release ordering.
3741  *
3742  * Safe to use in noinstr code; prefer atomic64_fetch_andnot_release() elsewhere.
3743  *                                               
3744  * Return: The original value of @v.             
3745  */                                              
3746 static __always_inline s64                       
3747 raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
3748 {
3749 #if defined(arch_atomic64_fetch_andnot_release)
3750         return arch_atomic64_fetch_andnot_release(i, v);
3751 #elif defined(arch_atomic64_fetch_andnot_relaxed)
3752         __atomic_release_fence();
3753         return arch_atomic64_fetch_andnot_relaxed(i, v);
3754 #elif defined(arch_atomic64_fetch_andnot)
3755         return arch_atomic64_fetch_andnot(i, v);
3756 #else
3757         return raw_atomic64_fetch_and_release(~i, v);
3758 #endif                                           
3759 }                                                
3760                                                  
3761 /**                                              
3762  * raw_atomic64_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
3763  * @i: s64 value
3764  * @v: pointer to atomic64_t
3765  *
3766  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
3767  *
3768  * Safe to use in noinstr code; prefer atomic64_fetch_andnot_relaxed() elsewhere.
3769  *                                               
3770  * Return: The original value of @v.             
3771  */                                              
3772 static __always_inline s64                       
3773 raw_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
3774 {
3775 #if defined(arch_atomic64_fetch_andnot_relaxed)
3776         return arch_atomic64_fetch_andnot_relaxed(i, v);
3777 #elif defined(arch_atomic64_fetch_andnot)
3778         return arch_atomic64_fetch_andnot(i, v);
3779 #else
3780         return raw_atomic64_fetch_and_relaxed(~i, v);
3781 #endif                                           
3782 }                                                
3783                                                  
3784 /**                                              
3785  * raw_atomic64_or() - atomic bitwise OR with relaxed ordering
3786  * @i: s64 value
3787  * @v: pointer to atomic64_t
3788  *
3789  * Atomically updates @v to (@v | @i) with relaxed ordering.
3790  *
3791  * Safe to use in noinstr code; prefer atomic64_or() elsewhere.
3792  *                                               
3793  * Return: Nothing.                              
3794  */                                              
3795 static __always_inline void                      
3796 raw_atomic64_or(s64 i, atomic64_t *v)            
3797 {                                                
3798         arch_atomic64_or(i, v);                  
3799 }                                                
3800                                                  
3801 /**                                              
3802  * raw_atomic64_fetch_or() - atomic bitwise OR with full ordering
3803  * @i: s64 value
3804  * @v: pointer to atomic64_t
3805  *
3806  * Atomically updates @v to (@v | @i) with full ordering.
3807  *
3808  * Safe to use in noinstr code; prefer atomic64_fetch_or() elsewhere.
3809  *                                               
3810  * Return: The original value of @v.             
3811  */                                              
3812 static __always_inline s64                       
3813 raw_atomic64_fetch_or(s64 i, atomic64_t *v)      
3814 {                                                
3815 #if defined(arch_atomic64_fetch_or)              
3816         return arch_atomic64_fetch_or(i, v);     
3817 #elif defined(arch_atomic64_fetch_or_relaxed)    
3818         s64 ret;                                 
3819         __atomic_pre_full_fence();               
3820         ret = arch_atomic64_fetch_or_relaxed(i, v);
3821         __atomic_post_full_fence();
3822         return ret;
3823 #else
3824 #error "Unable to define raw_atomic64_fetch_or"
3825 #endif                                           
3826 }                                                
3827                                                  
3828 /**                                              
3829  * raw_atomic64_fetch_or_acquire() - atomic bitwise OR with acquire ordering
3830  * @i: s64 value
3831  * @v: pointer to atomic64_t
3832  *
3833  * Atomically updates @v to (@v | @i) with acquire ordering.
3834  *
3835  * Safe to use in noinstr code; prefer atomic64_fetch_or_acquire() elsewhere.
3836  *                                               
3837  * Return: The original value of @v.             
3838  */                                              
3839 static __always_inline s64                       
3840 raw_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
3841 {
3842 #if defined(arch_atomic64_fetch_or_acquire)
3843         return arch_atomic64_fetch_or_acquire(i, v);
3844 #elif defined(arch_atomic64_fetch_or_relaxed)
3845         s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
3846         __atomic_acquire_fence();
3847         return ret;
3848 #elif defined(arch_atomic64_fetch_or)
3849         return arch_atomic64_fetch_or(i, v);
3850 #else
3851 #error "Unable to define raw_atomic64_fetch_or_acquire"
3852 #endif                                           
3853 }                                                
3854                                                  
3855 /**                                              
3856  * raw_atomic64_fetch_or_release() - atomic bitwise OR with release ordering
3857  * @i: s64 value
3858  * @v: pointer to atomic64_t
3859  *
3860  * Atomically updates @v to (@v | @i) with release ordering.
3861  *
3862  * Safe to use in noinstr code; prefer atomic64_fetch_or_release() elsewhere.
3863  *                                               
3864  * Return: The original value of @v.             
3865  */                                              
3866 static __always_inline s64                       
3867 raw_atomic64_fetch_or_release(s64 i, atomic64_t *v)
3868 {
3869 #if defined(arch_atomic64_fetch_or_release)
3870         return arch_atomic64_fetch_or_release(i, v);
3871 #elif defined(arch_atomic64_fetch_or_relaxed)
3872         __atomic_release_fence();
3873         return arch_atomic64_fetch_or_relaxed(i, v);
3874 #elif defined(arch_atomic64_fetch_or)
3875         return arch_atomic64_fetch_or(i, v);
3876 #else
3877 #error "Unable to define raw_atomic64_fetch_or_release"
3878 #endif                                           
3879 }                                                
3880                                                  
3881 /**                                              
3882  * raw_atomic64_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
3883  * @i: s64 value
3884  * @v: pointer to atomic64_t
3885  *
3886  * Atomically updates @v to (@v | @i) with relaxed ordering.
3887  *
3888  * Safe to use in noinstr code; prefer atomic64_fetch_or_relaxed() elsewhere.
3889  *                                               
3890  * Return: The original value of @v.             
3891  */                                              
3892 static __always_inline s64                       
3893 raw_atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
3894 {
3895 #if defined(arch_atomic64_fetch_or_relaxed)
3896         return arch_atomic64_fetch_or_relaxed(i, v);
3897 #elif defined(arch_atomic64_fetch_or)
3898         return arch_atomic64_fetch_or(i, v);
3899 #else
3900 #error "Unable to define raw_atomic64_fetch_or_relaxed"
3901 #endif                                           
3902 }                                                
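
/*
 * Usage sketch under stated assumptions: because fetch_or() returns the old
 * value, a caller can set a flag and learn whether it was the first to do
 * so. SKETCH_FLAG_INIT is a hypothetical bit.
 */
#include <linux/atomic.h>
#include <linux/bits.h>

#define SKETCH_FLAG_INIT	BIT_ULL(3)

static inline bool sketch_set_init_once(atomic64_t *state)
{
	/* true only for the caller that actually transitioned the bit 0 -> 1 */
	return !(atomic64_fetch_or(SKETCH_FLAG_INIT, state) & SKETCH_FLAG_INIT);
}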
3903                                                  
3904 /**                                              
3905  * raw_atomic64_xor() - atomic bitwise XOR with relaxed ordering
3906  * @i: s64 value
3907  * @v: pointer to atomic64_t
3908  *
3909  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
3910  *
3911  * Safe to use in noinstr code; prefer atomic64_xor() elsewhere.
3912  *                                               
3913  * Return: Nothing.                              
3914  */                                              
3915 static __always_inline void                      
3916 raw_atomic64_xor(s64 i, atomic64_t *v)           
3917 {                                                
3918         arch_atomic64_xor(i, v);                 
3919 }                                                
3920                                                  
3921 /**                                              
3922  * raw_atomic64_fetch_xor() - atomic bitwise XOR with full ordering
3923  * @i: s64 value
3924  * @v: pointer to atomic64_t
3925  *
3926  * Atomically updates @v to (@v ^ @i) with full ordering.
3927  *
3928  * Safe to use in noinstr code; prefer atomic64_fetch_xor() elsewhere.
3929  *                                               
3930  * Return: The original value of @v.             
3931  */                                              
3932 static __always_inline s64                       
3933 raw_atomic64_fetch_xor(s64 i, atomic64_t *v)     
3934 {                                                
3935 #if defined(arch_atomic64_fetch_xor)             
3936         return arch_atomic64_fetch_xor(i, v);    
3937 #elif defined(arch_atomic64_fetch_xor_relaxed)
3938         s64 ret;
3939         __atomic_pre_full_fence();
3940         ret = arch_atomic64_fetch_xor_relaxed(i, v);
3941         __atomic_post_full_fence();
3942         return ret;
3943 #else
3944 #error "Unable to define raw_atomic64_fetch_xor"
3945 #endif                                           
3946 }                                                
3947                                                  
3948 /**                                              
3949  * raw_atomic64_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
3950  * @i: s64 value
3951  * @v: pointer to atomic64_t
3952  *
3953  * Atomically updates @v to (@v ^ @i) with acquire ordering.
3954  *
3955  * Safe to use in noinstr code; prefer atomic64_fetch_xor_acquire() elsewhere.
3956  *                                               
3957  * Return: The original value of @v.             
3958  */                                              
3959 static __always_inline s64                       
3960 raw_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
3961 {
3962 #if defined(arch_atomic64_fetch_xor_acquire)
3963         return arch_atomic64_fetch_xor_acquire(i, v);
3964 #elif defined(arch_atomic64_fetch_xor_relaxed)
3965         s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
3966         __atomic_acquire_fence();
3967         return ret;
3968 #elif defined(arch_atomic64_fetch_xor)
3969         return arch_atomic64_fetch_xor(i, v);
3970 #else
3971 #error "Unable to define raw_atomic64_fetch_xor_acquire"
3972 #endif                                           
3973 }                                                
3974                                                  
3975 /**                                              
3976  * raw_atomic64_fetch_xor_release() - atomic bitwise XOR with release ordering
3977  * @i: s64 value
3978  * @v: pointer to atomic64_t
3979  *
3980  * Atomically updates @v to (@v ^ @i) with release ordering.
3981  *
3982  * Safe to use in noinstr code; prefer atomic64_fetch_xor_release() elsewhere.
3983  *                                               
3984  * Return: The original value of @v.             
3985  */                                              
3986 static __always_inline s64                       
3987 raw_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
3988 {
3989 #if defined(arch_atomic64_fetch_xor_release)
3990         return arch_atomic64_fetch_xor_release(i, v);
3991 #elif defined(arch_atomic64_fetch_xor_relaxed)
3992         __atomic_release_fence();
3993         return arch_atomic64_fetch_xor_relaxed(i, v);
3994 #elif defined(arch_atomic64_fetch_xor)
3995         return arch_atomic64_fetch_xor(i, v);
3996 #else
3997 #error "Unable to define raw_atomic64_fetch_xor_release"
3998 #endif                                           
3999 }                                                
4000                                                  
4001 /**                                              
4002  * raw_atomic64_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
4003  * @i: s64 value
4004  * @v: pointer to atomic64_t
4005  *
4006  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
4007  *
4008  * Safe to use in noinstr code; prefer atomic64_fetch_xor_relaxed() elsewhere.
4009  *                                               
4010  * Return: The original value of @v.             
4011  */                                              
4012 static __always_inline s64                       
4013 raw_atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
4014 {
4015 #if defined(arch_atomic64_fetch_xor_relaxed)
4016         return arch_atomic64_fetch_xor_relaxed(i, v);
4017 #elif defined(arch_atomic64_fetch_xor)
4018         return arch_atomic64_fetch_xor(i, v);
4019 #else
4020 #error "Unable to define raw_atomic64_fetch_xor_relaxed"
4021 #endif                                           
4022 }                                                
4023                                                  
4024 /**                                              
4025  * raw_atomic64_xchg() - atomic exchange with full ordering
4026  * @v: pointer to atomic64_t
4027  * @new: s64 value to assign
4028  *
4029  * Atomically updates @v to @new with full ordering.
4030  *
4031  * Safe to use in noinstr code; prefer atomic64_xchg() elsewhere.
4032  *                                               
4033  * Return: The original value of @v.             
4034  */                                              
4035 static __always_inline s64                       
4036 raw_atomic64_xchg(atomic64_t *v, s64 new)        
4037 {                                                
4038 #if defined(arch_atomic64_xchg)                  
4039         return arch_atomic64_xchg(v, new);       
4040 #elif defined(arch_atomic64_xchg_relaxed)        
4041         s64 ret;                                 
4042         __atomic_pre_full_fence();               
4043         ret = arch_atomic64_xchg_relaxed(v, new);
4044         __atomic_post_full_fence();              
4045         return ret;                              
4046 #else                                            
4047         return raw_xchg(&v->counter, new);       
4048 #endif                                           
4049 }                                                
4050                                                  
4051 /**                                              
4052  * raw_atomic64_xchg_acquire() - atomic exchange with acquire ordering
4053  * @v: pointer to atomic64_t
4054  * @new: s64 value to assign
4055  *
4056  * Atomically updates @v to @new with acquire ordering.
4057  *
4058  * Safe to use in noinstr code; prefer atomic64_xchg_acquire() elsewhere.
4059  *                                               
4060  * Return: The original value of @v.             
4061  */                                              
4062 static __always_inline s64                       
4063 raw_atomic64_xchg_acquire(atomic64_t *v, s64 new)
4064 {
4065 #if defined(arch_atomic64_xchg_acquire)
4066         return arch_atomic64_xchg_acquire(v, new);
4067 #elif defined(arch_atomic64_xchg_relaxed)
4068         s64 ret = arch_atomic64_xchg_relaxed(v, new);
4069         __atomic_acquire_fence();
4070         return ret;
4071 #elif defined(arch_atomic64_xchg)
4072         return arch_atomic64_xchg(v, new);
4073 #else
4074         return raw_xchg_acquire(&v->counter, new);
4075 #endif                                           
4076 }                                                
4077                                                  
4078 /**                                              
4079  * raw_atomic64_xchg_release() - atomic exchange with release ordering
4080  * @v: pointer to atomic64_t
4081  * @new: s64 value to assign
4082  *
4083  * Atomically updates @v to @new with release ordering.
4084  *
4085  * Safe to use in noinstr code; prefer atomic64_xchg_release() elsewhere.
4086  *                                               
4087  * Return: The original value of @v.             
4088  */                                              
4089 static __always_inline s64                       
4090 raw_atomic64_xchg_release(atomic64_t *v, s64 new)
4091 {
4092 #if defined(arch_atomic64_xchg_release)
4093         return arch_atomic64_xchg_release(v, new);
4094 #elif defined(arch_atomic64_xchg_relaxed)
4095         __atomic_release_fence();
4096         return arch_atomic64_xchg_relaxed(v, new);
4097 #elif defined(arch_atomic64_xchg)
4098         return arch_atomic64_xchg(v, new);
4099 #else
4100         return raw_xchg_release(&v->counter, new);
4101 #endif                                           
4102 }                                                
4103                                                  
4104 /**                                              
4105  * raw_atomic64_xchg_relaxed() - atomic exchange with relaxed ordering
4106  * @v: pointer to atomic64_t
4107  * @new: s64 value to assign
4108  *
4109  * Atomically updates @v to @new with relaxed ordering.
4110  *
4111  * Safe to use in noinstr code; prefer atomic64_xchg_relaxed() elsewhere.
4112  *                                               
4113  * Return: The original value of @v.             
4114  */                                              
4115 static __always_inline s64                       
4116 raw_atomic64_xchg_relaxed(atomic64_t *v, s64     
4117 {                                                
4118 #if defined(arch_atomic64_xchg_relaxed)          
4119         return arch_atomic64_xchg_relaxed(v,     
4120 #elif defined(arch_atomic64_xchg)                
4121         return arch_atomic64_xchg(v, new);       
4122 #else                                            
4123         return raw_xchg_relaxed(&v->counter,     
4124 #endif                                           
4125 }                                                
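Illustrative sketch, not part of the generated file: the acquire, release and relaxed xchg variants above differ only in which fence the fallback composes around arch_atomic64_xchg_relaxed(). A hypothetical noinstr-safe helper that drains a pending value might use the acquire form so that later accesses observe whatever was published before the value was stored:

        /* Hypothetical example; take_pending() is not a kernel API. */
        static __always_inline s64 take_pending(atomic64_t *pending)
        {
                /* acquire: later accesses cannot be hoisted above the exchange */
                return raw_atomic64_xchg_acquire(pending, 0);
        }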
4126                                                  
4127 /**                                              
4128  * raw_atomic64_cmpxchg() - atomic compare and exchange with full ordering
4129  * @v: pointer to atomic64_t
4130  * @old: s64 value to compare with
4131  * @new: s64 value to assign
4132  *
4133  * If (@v == @old), atomically updates @v to @new with full ordering.
4134  * Otherwise, @v is not modified and relaxed ordering is provided.
4135  *
4136  * Safe to use in noinstr code; prefer atomic64_cmpxchg() elsewhere.
4137  *
4138  * Return: The original value of @v.
4139  */
4140 static __always_inline s64
4141 raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
4142 {
4143 #if defined(arch_atomic64_cmpxchg)
4144         return arch_atomic64_cmpxchg(v, old, new);
4145 #elif defined(arch_atomic64_cmpxchg_relaxed)
4146         s64 ret;
4147         __atomic_pre_full_fence();
4148         ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
4149         __atomic_post_full_fence();
4150         return ret;
4151 #else
4152         return raw_cmpxchg(&v->counter, old, new);
4153 #endif                                           
4154 }                                                
4155                                                  
4156 /**                                              
4157  * raw_atomic64_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
4158  * @v: pointer to atomic64_t
4159  * @old: s64 value to compare with
4160  * @new: s64 value to assign
4161  *
4162  * If (@v == @old), atomically updates @v to @new with acquire ordering.
4163  * Otherwise, @v is not modified and relaxed ordering is provided.
4164  *
4165  * Safe to use in noinstr code; prefer atomic64_cmpxchg_acquire() elsewhere.
4166  *
4167  * Return: The original value of @v.
4168  */
4169 static __always_inline s64
4170 raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
4171 {
4172 #if defined(arch_atomic64_cmpxchg_acquire)
4173         return arch_atomic64_cmpxchg_acquire(v, old, new);
4174 #elif defined(arch_atomic64_cmpxchg_relaxed)
4175         s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
4176         __atomic_acquire_fence();
4177         return ret;
4178 #elif defined(arch_atomic64_cmpxchg)
4179         return arch_atomic64_cmpxchg(v, old, new);
4180 #else
4181         return raw_cmpxchg_acquire(&v->counter, old, new);
4182 #endif                                           
4183 }                                                
4184                                                  
4185 /**                                              
4186  * raw_atomic64_cmpxchg_release() - atomic compare and exchange with release ordering
4187  * @v: pointer to atomic64_t
4188  * @old: s64 value to compare with
4189  * @new: s64 value to assign
4190  *
4191  * If (@v == @old), atomically updates @v to @new with release ordering.
4192  * Otherwise, @v is not modified and relaxed ordering is provided.
4193  *
4194  * Safe to use in noinstr code; prefer atomic64_cmpxchg_release() elsewhere.
4195  *
4196  * Return: The original value of @v.
4197  */
4198 static __always_inline s64
4199 raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
4200 {
4201 #if defined(arch_atomic64_cmpxchg_release)
4202         return arch_atomic64_cmpxchg_release(v, old, new);
4203 #elif defined(arch_atomic64_cmpxchg_relaxed)
4204         __atomic_release_fence();
4205         return arch_atomic64_cmpxchg_relaxed(v, old, new);
4206 #elif defined(arch_atomic64_cmpxchg)
4207         return arch_atomic64_cmpxchg(v, old, new);
4208 #else
4209         return raw_cmpxchg_release(&v->counter, old, new);
4210 #endif                                           
4211 }                                                
4212                                                  
4213 /**                                              
4214  * raw_atomic64_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
4215  * @v: pointer to atomic64_t
4216  * @old: s64 value to compare with
4217  * @new: s64 value to assign
4218  *
4219  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
4220  * Otherwise, @v is not modified and relaxed ordering is provided.
4221  *
4222  * Safe to use in noinstr code; prefer atomic64_cmpxchg_relaxed() elsewhere.
4223  *
4224  * Return: The original value of @v.
4225  */
4226 static __always_inline s64
4227 raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
4228 {
4229 #if defined(arch_atomic64_cmpxchg_relaxed)
4230         return arch_atomic64_cmpxchg_relaxed(v, old, new);
4231 #elif defined(arch_atomic64_cmpxchg)
4232         return arch_atomic64_cmpxchg(v, old, new);
4233 #else
4234         return raw_cmpxchg_relaxed(&v->counter, old, new);
4235 #endif                                           
4236 }                                                
4237                                                  
4238 /**                                              
4239  * raw_atomic64_try_cmpxchg() - atomic compare and exchange with full ordering
4240  * @v: pointer to atomic64_t
4241  * @old: pointer to s64 value to compare with
4242  * @new: s64 value to assign
4243  *
4244  * If (@v == @old), atomically updates @v to @new with full ordering.
4245  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4246  * and relaxed ordering is provided.
4247  *
4248  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg() elsewhere.
4249  *
4250  * Return: @true if the exchange occurred, @false otherwise.
4251  */
4252 static __always_inline bool
4253 raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
4254 {
4255 #if defined(arch_atomic64_try_cmpxchg)
4256         return arch_atomic64_try_cmpxchg(v, old, new);
4257 #elif defined(arch_atomic64_try_cmpxchg_relaxed)
4258         bool ret;
4259         __atomic_pre_full_fence();
4260         ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
4261         __atomic_post_full_fence();              
4262         return ret;                              
4263 #else                                            
4264         s64 r, o = *old;                         
4265         r = raw_atomic64_cmpxchg(v, o, new);     
4266         if (unlikely(r != o))                    
4267                 *old = r;                        
4268         return likely(r == o);                   
4269 #endif                                           
4270 }                                                
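Illustrative sketch, not part of the generated file: the fallback above shows the defining property of try_cmpxchg() — on failure, *@old is refreshed with the current value of @v, so a retry loop needs no separate re-read. A hypothetical capped-add helper built on that pattern:

        /* Hypothetical example; add_capped() is not a kernel API. */
        static inline bool add_capped(atomic64_t *v, s64 i, s64 cap)
        {
                s64 old = raw_atomic64_read(v);

                do {
                        if (old + i > cap)
                                return false;   /* would exceed the cap */
                        /* on failure, @old now holds the current value of @v */
                } while (!raw_atomic64_try_cmpxchg(v, &old, old + i));

                return true;
        }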
4271                                                  
4272 /**                                              
4273  * raw_atomic64_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
4274  * @v: pointer to atomic64_t
4275  * @old: pointer to s64 value to compare with
4276  * @new: s64 value to assign
4277  *
4278  * If (@v == @old), atomically updates @v to @new with acquire ordering.
4279  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4280  * and relaxed ordering is provided.
4281  *
4282  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg_acquire() elsewhere.
4283  *
4284  * Return: @true if the exchange occurred, @false otherwise.
4285  */
4286 static __always_inline bool
4287 raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
4288 {
4289 #if defined(arch_atomic64_try_cmpxchg_acquire)
4290         return arch_atomic64_try_cmpxchg_acquire(v, old, new);
4291 #elif defined(arch_atomic64_try_cmpxchg_relaxed)
4292         bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
4293         __atomic_acquire_fence();
4294         return ret;
4295 #elif defined(arch_atomic64_try_cmpxchg)
4296         return arch_atomic64_try_cmpxchg(v, old, new);
4297 #else
4298         s64 r, o = *old;
4299         r = raw_atomic64_cmpxchg_acquire(v, o, new);
4300         if (unlikely(r != o))                    
4301                 *old = r;                        
4302         return likely(r == o);                   
4303 #endif                                           
4304 }                                                
4305                                                  
4306 /**                                              
4307  * raw_atomic64_try_cmpxchg_release() - atomic compare and exchange with release ordering
4308  * @v: pointer to atomic64_t
4309  * @old: pointer to s64 value to compare with
4310  * @new: s64 value to assign
4311  *
4312  * If (@v == @old), atomically updates @v to @new with release ordering.
4313  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4314  * and relaxed ordering is provided.
4315  *
4316  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg_release() elsewhere.
4317  *
4318  * Return: @true if the exchange occurred, @false otherwise.
4319  */
4320 static __always_inline bool
4321 raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
4322 {
4323 #if defined(arch_atomic64_try_cmpxchg_release)
4324         return arch_atomic64_try_cmpxchg_release(v, old, new);
4325 #elif defined(arch_atomic64_try_cmpxchg_relaxed)
4326         __atomic_release_fence();
4327         return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
4328 #elif defined(arch_atomic64_try_cmpxchg)
4329         return arch_atomic64_try_cmpxchg(v, old, new);
4330 #else
4331         s64 r, o = *old;
4332         r = raw_atomic64_cmpxchg_release(v, o, new);
4333         if (unlikely(r != o))                    
4334                 *old = r;                        
4335         return likely(r == o);                   
4336 #endif                                           
4337 }                                                
4338                                                  
4339 /**                                              
4340  * raw_atomic64_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
4341  * @v: pointer to atomic64_t
4342  * @old: pointer to s64 value to compare with
4343  * @new: s64 value to assign
4344  *
4345  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
4346  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4347  * and relaxed ordering is provided.
4348  *
4349  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg_relaxed() elsewhere.
4350  *
4351  * Return: @true if the exchange occurred, @false otherwise.
4352  */
4353 static __always_inline bool
4354 raw_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
4355 {
4356 #if defined(arch_atomic64_try_cmpxchg_relaxed)
4357         return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
4358 #elif defined(arch_atomic64_try_cmpxchg)
4359         return arch_atomic64_try_cmpxchg(v, old, new);
4360 #else
4361         s64 r, o = *old;
4362         r = raw_atomic64_cmpxchg_relaxed(v, o, new);
4363         if (unlikely(r != o))                    
4364                 *old = r;                        
4365         return likely(r == o);                   
4366 #endif                                           
4367 }                                                
4368                                                  
4369 /**                                              
4370  * raw_atomic64_sub_and_test() - atomic subtract and test if zero with full ordering
4371  * @i: s64 value to subtract
4372  * @v: pointer to atomic64_t
4373  *
4374  * Atomically updates @v to (@v - @i) with full ordering.
4375  *
4376  * Safe to use in noinstr code; prefer atomic64_sub_and_test() elsewhere.
4377  *
4378  * Return: @true if the resulting value of @v is zero, @false otherwise.
4379  */
4380 static __always_inline bool
4381 raw_atomic64_sub_and_test(s64 i, atomic64_t *v)
4382 {
4383 #if defined(arch_atomic64_sub_and_test)
4384         return arch_atomic64_sub_and_test(i, v);
4385 #else
4386         return raw_atomic64_sub_return(i, v) == 0;
4387 #endif                                           
4388 }                                                
4389                                                  
4390 /**                                              
4391  * raw_atomic64_dec_and_test() - atomic decrement and test if zero with full ordering
4392  * @v: pointer to atomic64_t
4393  *
4394  * Atomically updates @v to (@v - 1) with full ordering.
4395  *
4396  * Safe to use in noinstr code; prefer atomic64_dec_and_test() elsewhere.
4397  *
4398  * Return: @true if the resulting value of @v is zero, @false otherwise.
4399  */
4400 static __always_inline bool
4401 raw_atomic64_dec_and_test(atomic64_t *v)
4402 {
4403 #if defined(arch_atomic64_dec_and_test)
4404         return arch_atomic64_dec_and_test(v);
4405 #else
4406         return raw_atomic64_dec_return(v) == 0;
4407 #endif                                           
4408 }                                                
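Illustrative sketch, not part of the generated file: dec_and_test() is the usual building block for a reference-count "put" — only the thread that observes the counter reach zero releases the object, and the full ordering makes prior accesses to the object visible before it is freed. A hypothetical put helper:

        /* Hypothetical example; struct obj and obj_put() are not kernel APIs. */
        struct obj {
                atomic64_t refcnt;
                /* ... payload ... */
        };

        static inline void obj_put(struct obj *o, void (*release)(struct obj *))
        {
                if (raw_atomic64_dec_and_test(&o->refcnt))
                        release(o);     /* last reference dropped */
        }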
4409                                                  
4410 /**                                              
4411  * raw_atomic64_inc_and_test() - atomic increment and test if zero with full ordering
4412  * @v: pointer to atomic64_t
4413  *
4414  * Atomically updates @v to (@v + 1) with full ordering.
4415  *
4416  * Safe to use in noinstr code; prefer atomic64_inc_and_test() elsewhere.
4417  *
4418  * Return: @true if the resulting value of @v is zero, @false otherwise.
4419  */
4420 static __always_inline bool
4421 raw_atomic64_inc_and_test(atomic64_t *v)
4422 {
4423 #if defined(arch_atomic64_inc_and_test)
4424         return arch_atomic64_inc_and_test(v);
4425 #else
4426         return raw_atomic64_inc_return(v) == 0;
4427 #endif                                           
4428 }                                                
4429                                                  
4430 /**                                              
4431  * raw_atomic64_add_negative() - atomic add and test if negative with full ordering
4432  * @i: s64 value to add
4433  * @v: pointer to atomic64_t
4434  *
4435  * Atomically updates @v to (@v + @i) with full ordering.
4436  *
4437  * Safe to use in noinstr code; prefer atomic64_add_negative() elsewhere.
4438  *
4439  * Return: @true if the resulting value of @v is negative, @false otherwise.
4440  */
4441 static __always_inline bool
4442 raw_atomic64_add_negative(s64 i, atomic64_t *v)
4443 {
4444 #if defined(arch_atomic64_add_negative)
4445         return arch_atomic64_add_negative(i, v);
4446 #elif defined(arch_atomic64_add_negative_relaxed)
4447         bool ret;
4448         __atomic_pre_full_fence();
4449         ret = arch_atomic64_add_negative_relaxed(i, v);
4450         __atomic_post_full_fence();
4451         return ret;
4452 #else
4453         return raw_atomic64_add_return(i, v) < 0;
4454 #endif                                           
4455 }                                                
4456                                                  
4457 /**                                              
4458  * raw_atomic64_add_negative_acquire() - atomic add and test if negative with acquire ordering
4459  * @i: s64 value to add
4460  * @v: pointer to atomic64_t
4461  *
4462  * Atomically updates @v to (@v + @i) with acquire ordering.
4463  *
4464  * Safe to use in noinstr code; prefer atomic64_add_negative_acquire() elsewhere.
4465  *
4466  * Return: @true if the resulting value of @v is negative, @false otherwise.
4467  */
4468 static __always_inline bool
4469 raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
4470 {
4471 #if defined(arch_atomic64_add_negative_acquire)
4472         return arch_atomic64_add_negative_acquire(i, v);
4473 #elif defined(arch_atomic64_add_negative_relaxed)
4474         bool ret = arch_atomic64_add_negative_relaxed(i, v);
4475         __atomic_acquire_fence();
4476         return ret;
4477 #elif defined(arch_atomic64_add_negative)
4478         return arch_atomic64_add_negative(i, v);
4479 #else
4480         return raw_atomic64_add_return_acquire(i, v) < 0;
4481 #endif                                           
4482 }                                                
4483                                                  
4484 /**                                              
4485  * raw_atomic64_add_negative_release() - atomic add and test if negative with release ordering
4486  * @i: s64 value to add
4487  * @v: pointer to atomic64_t
4488  *
4489  * Atomically updates @v to (@v + @i) with release ordering.
4490  *
4491  * Safe to use in noinstr code; prefer atomic64_add_negative_release() elsewhere.
4492  *
4493  * Return: @true if the resulting value of @v is negative, @false otherwise.
4494  */
4495 static __always_inline bool
4496 raw_atomic64_add_negative_release(s64 i, atomic64_t *v)
4497 {
4498 #if defined(arch_atomic64_add_negative_release)
4499         return arch_atomic64_add_negative_release(i, v);
4500 #elif defined(arch_atomic64_add_negative_relaxed)
4501         __atomic_release_fence();
4502         return arch_atomic64_add_negative_relaxed(i, v);
4503 #elif defined(arch_atomic64_add_negative)
4504         return arch_atomic64_add_negative(i, v);
4505 #else
4506         return raw_atomic64_add_return_release(i, v) < 0;
4507 #endif                                           
4508 }                                                
4509                                                  
4510 /**                                              
4511  * raw_atomic64_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
4512  * @i: s64 value to add
4513  * @v: pointer to atomic64_t
4514  *
4515  * Atomically updates @v to (@v + @i) with relaxed ordering.
4516  *
4517  * Safe to use in noinstr code; prefer atomic64_add_negative_relaxed() elsewhere.
4518  *
4519  * Return: @true if the resulting value of @v is negative, @false otherwise.
4520  */
4521 static __always_inline bool
4522 raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
4523 {
4524 #if defined(arch_atomic64_add_negative_relaxed)
4525         return arch_atomic64_add_negative_relaxed(i, v);
4526 #elif defined(arch_atomic64_add_negative)
4527         return arch_atomic64_add_negative(i, v);
4528 #else
4529         return raw_atomic64_add_return_relaxed(i, v) < 0;
4530 #endif                                           
4531 }                                                
4532                                                  
4533 /**                                              
4534  * raw_atomic64_fetch_add_unless() - atomic add unless value with full ordering
4535  * @v: pointer to atomic64_t
4536  * @a: s64 value to add
4537  * @u: s64 value to compare with
4538  *
4539  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4540  * Otherwise, @v is not modified and relaxed ordering is provided.
4541  *
4542  * Safe to use in noinstr code; prefer atomic64_fetch_add_unless() elsewhere.
4543  *
4544  * Return: The original value of @v.
4545  */
4546 static __always_inline s64
4547 raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
4548 {
4549 #if defined(arch_atomic64_fetch_add_unless)
4550         return arch_atomic64_fetch_add_unless(v, a, u);
4551 #else
4552         s64 c = raw_atomic64_read(v);
4553
4554         do {
4555                 if (unlikely(c == u))
4556                         break;
4557         } while (!raw_atomic64_try_cmpxchg(v, &c, c + a));
4558                                                  
4559         return c;                                
4560 #endif                                           
4561 }                                                
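Illustrative sketch, not part of the generated file: fetch_add_unless() returns the original value, so callers can tell whether the add happened by comparing against @u — exactly how raw_atomic64_add_unless() below is derived from it. A hypothetical counter that must stay parked on a sentinel value:

        /* Hypothetical example; COUNTER_DEAD and counter_add_if_live() are not kernel APIs. */
        #define COUNTER_DEAD    ((s64)-1)

        static inline bool counter_add_if_live(atomic64_t *v, s64 a)
        {
                /* the add is skipped if the counter already holds COUNTER_DEAD */
                return raw_atomic64_fetch_add_unless(v, a, COUNTER_DEAD) != COUNTER_DEAD;
        }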
4562                                                  
4563 /**                                              
4564  * raw_atomic64_add_unless() - atomic add unless value with full ordering
4565  * @v: pointer to atomic64_t
4566  * @a: s64 value to add
4567  * @u: s64 value to compare with
4568  *
4569  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4570  * Otherwise, @v is not modified and relaxed ordering is provided.
4571  *
4572  * Safe to use in noinstr code; prefer atomic64_add_unless() elsewhere.
4573  *
4574  * Return: @true if @v was updated, @false otherwise.
4575  */
4576 static __always_inline bool
4577 raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
4578 {
4579 #if defined(arch_atomic64_add_unless)
4580         return arch_atomic64_add_unless(v, a, u);
4581 #else
4582         return raw_atomic64_fetch_add_unless(v, a, u) != u;
4583 #endif                                           
4584 }                                                
4585                                                  
4586 /**                                              
4587  * raw_atomic64_inc_not_zero() - atomic increment unless zero with full ordering
4588  * @v: pointer to atomic64_t
4589  *
4590  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
4591  * Otherwise, @v is not modified and relaxed ordering is provided.
4592  *
4593  * Safe to use in noinstr code; prefer atomic64_inc_not_zero() elsewhere.
4594  *
4595  * Return: @true if @v was updated, @false otherwise.
4596  */
4597 static __always_inline bool
4598 raw_atomic64_inc_not_zero(atomic64_t *v)
4599 {
4600 #if defined(arch_atomic64_inc_not_zero)
4601         return arch_atomic64_inc_not_zero(v);
4602 #else
4603         return raw_atomic64_add_unless(v, 1, 0);
4604 #endif                                           
4605 }                                                
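Illustrative sketch, not part of the generated file: inc_not_zero() takes a reference only while the object is still live; a count that has already dropped to zero is never resurrected. A hypothetical lookup helper:

        /* Hypothetical example; struct cache_entry and entry_tryget() are not kernel APIs. */
        struct cache_entry {
                atomic64_t refcnt;
                /* ... */
        };

        /* Returns NULL if the entry is already on its way to being freed. */
        static inline struct cache_entry *entry_tryget(struct cache_entry *e)
        {
                return raw_atomic64_inc_not_zero(&e->refcnt) ? e : NULL;
        }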
4606                                                  
4607 /**                                              
4608  * raw_atomic64_inc_unless_negative() - atomic increment unless negative with full ordering
4609  * @v: pointer to atomic64_t
4610  *
4611  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
4612  * Otherwise, @v is not modified and relaxed ordering is provided.
4613  *
4614  * Safe to use in noinstr code; prefer atomic64_inc_unless_negative() elsewhere.
4615  *
4616  * Return: @true if @v was updated, @false otherwise.
4617  */
4618 static __always_inline bool
4619 raw_atomic64_inc_unless_negative(atomic64_t *v)
4620 {
4621 #if defined(arch_atomic64_inc_unless_negative)
4622         return arch_atomic64_inc_unless_negative(v);
4623 #else
4624         s64 c = raw_atomic64_read(v);
4625
4626         do {
4627                 if (unlikely(c < 0))
4628                         return false;
4629         } while (!raw_atomic64_try_cmpxchg(v, &c, c + 1));
4630                                                  
4631         return true;                             
4632 #endif                                           
4633 }                                                
4634                                                  
4635 /**                                              
4636  * raw_atomic64_dec_unless_positive() - atomic decrement unless positive with full ordering
4637  * @v: pointer to atomic64_t
4638  *
4639  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
4640  * Otherwise, @v is not modified and relaxed ordering is provided.
4641  *
4642  * Safe to use in noinstr code; prefer atomic64_dec_unless_positive() elsewhere.
4643  *
4644  * Return: @true if @v was updated, @false otherwise.
4645  */
4646 static __always_inline bool
4647 raw_atomic64_dec_unless_positive(atomic64_t *v)
4648 {
4649 #if defined(arch_atomic64_dec_unless_positive)
4650         return arch_atomic64_dec_unless_positive(v);
4651 #else
4652         s64 c = raw_atomic64_read(v);
4653
4654         do {
4655                 if (unlikely(c > 0))
4656                         return false;
4657         } while (!raw_atomic64_try_cmpxchg(v, &c, c - 1));
4658                                                  
4659         return true;                             
4660 #endif                                           
4661 }                                                
4662                                                  
4663 /**                                              
4664  * raw_atomic64_dec_if_positive() - atomic decrement if positive with full ordering
4665  * @v: pointer to atomic64_t
4666  *
4667  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
4668  * Otherwise, @v is not modified and relaxed ordering is provided.
4669  *
4670  * Safe to use in noinstr code; prefer atomic64_dec_if_positive() elsewhere.
4671  *
4672  * Return: The old value of (@v - 1), regardless of whether @v was updated.
4673  */
4674 static __always_inline s64
4675 raw_atomic64_dec_if_positive(atomic64_t *v)
4676 {
4677 #if defined(arch_atomic64_dec_if_positive)
4678         return arch_atomic64_dec_if_positive(v);
4679 #else
4680         s64 dec, c = raw_atomic64_read(v);
4681
4682         do {
4683                 dec = c - 1;
4684                 if (unlikely(dec < 0))
4685                         break;
4686         } while (!raw_atomic64_try_cmpxchg(v, &c, dec));
4687                                                  
4688         return dec;                              
4689 #endif                                           
4690 }                                                
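Illustrative sketch, not part of the generated file: dec_if_positive() never drives the counter below zero and encodes success in the sign of the return value, which suits semaphore-like "consume a token if one is available" counters. A hypothetical helper:

        /* Hypothetical example; token_tryget() is not a kernel API. */
        static inline bool token_tryget(atomic64_t *tokens)
        {
                /* a negative result means the pool was empty and was left untouched */
                return raw_atomic64_dec_if_positive(tokens) >= 0;
        }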
4691                                                  
4692 #endif /* _LINUX_ATOMIC_FALLBACK_H */            
4693 // b565db590afeeff0d7c9485ccbca5bb6e155749f      
4694                                                  
