
TOMOYO Linux Cross Reference
Linux/include/linux/atomic/atomic-long.h


Diff markup

Differences between /include/linux/atomic/atomic-long.h (Version linux-6.11.5) and /include/linux/atomic/atomic-long.h (Version linux-6.0.19)


  1 // SPDX-License-Identifier: GPL-2.0                 1 // SPDX-License-Identifier: GPL-2.0
  2                                                     2 
  3 // Generated by scripts/atomic/gen-atomic-long.sh      3 // Generated by scripts/atomic/gen-atomic-long.sh
  4 // DO NOT MODIFY THIS FILE DIRECTLY                 4 // DO NOT MODIFY THIS FILE DIRECTLY
  5                                                     5 
  6 #ifndef _LINUX_ATOMIC_LONG_H                        6 #ifndef _LINUX_ATOMIC_LONG_H
  7 #define _LINUX_ATOMIC_LONG_H                        7 #define _LINUX_ATOMIC_LONG_H
  8                                                     8 
  9 #include <linux/compiler.h>                         9 #include <linux/compiler.h>
 10 #include <asm/types.h>                             10 #include <asm/types.h>
 11                                                    11 
 12 #ifdef CONFIG_64BIT                                12 #ifdef CONFIG_64BIT
 13 typedef atomic64_t atomic_long_t;                  13 typedef atomic64_t atomic_long_t;
 14 #define ATOMIC_LONG_INIT(i)             ATOMIC64_INIT(i)             14 #define ATOMIC_LONG_INIT(i)             ATOMIC64_INIT(i)
 15 #define atomic_long_cond_read_acquire   atomic64_cond_read_acquire   15 #define atomic_long_cond_read_acquire   atomic64_cond_read_acquire
 16 #define atomic_long_cond_read_relaxed   atomic64_cond_read_relaxed   16 #define atomic_long_cond_read_relaxed   atomic64_cond_read_relaxed
 17 #else                                              17 #else
 18 typedef atomic_t atomic_long_t;                    18 typedef atomic_t atomic_long_t;
 19 #define ATOMIC_LONG_INIT(i)             ATOMIC_INIT(i)               19 #define ATOMIC_LONG_INIT(i)             ATOMIC_INIT(i)
 20 #define atomic_long_cond_read_acquire   atomic_cond_read_acquire     20 #define atomic_long_cond_read_acquire   atomic_cond_read_acquire
 21 #define atomic_long_cond_read_relaxed   atomic_cond_read_relaxed     21 #define atomic_long_cond_read_relaxed   atomic_cond_read_relaxed
 22 #endif                                             22 #endif
 23                                                    23 
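
The typedefs above are the whole public contract in both versions: atomic_long_t is atomic64_t on 64-bit kernels and atomic_t on 32-bit ones, so it always holds a pointer-sized value. The rest of the diff is mechanical: linux-6.0.19 defines arch_atomic_long_*() wrappers inside two large #ifdef CONFIG_64BIT branches, while linux-6.11.5 renames them to raw_atomic_long_*(), adds kernel-doc comments, and moves the CONFIG_64BIT dispatch inside each function. A minimal usage sketch (the counter is hypothetical; the instrumented atomic_long_*() API is reached via <linux/atomic.h>):

        #include <linux/atomic.h>

        /* Pointer-sized counter: atomic64_t on 64-bit kernels, atomic_t on 32-bit. */
        static atomic_long_t nr_widgets = ATOMIC_LONG_INIT(0);

        static void widget_created(void)
        {
                atomic_long_inc(&nr_widgets);           /* relaxed RMW */
        }

        static long widgets_outstanding(void)
        {
                return atomic_long_read(&nr_widgets);   /* relaxed load */
        }
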
 24 /**                                            !!  24 #ifdef CONFIG_64BIT
 25  * raw_atomic_long_read() - atomic load with relaxed ordering  !!  25 
 26  * @v: pointer to atomic_long_t                << 
 27  *                                             << 
 28  * Atomically loads the value of @v with relaxed ordering.  << 
 29  *                                             << 
 30  * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.  << 
 31  *                                             << 
 32  * Return: The value loaded from @v.           << 
 33  */                                            << 
 34 static __always_inline long                        26 static __always_inline long
 35 raw_atomic_long_read(const atomic_long_t *v)   !!  27 arch_atomic_long_read(const atomic_long_t *v)
 36 {                                                  28 {
 37 #ifdef CONFIG_64BIT                            !!  29         return arch_atomic64_read(v);
 38         return raw_atomic64_read(v);           !!  30 }
 39 #else                                          !!  31 
 40         return raw_atomic_read(v);             << 
 41 #endif                                         << 
 42 }                                              << 
 43                                                << 
 44 /**                                            << 
 45  * raw_atomic_long_read_acquire() - atomic load with acquire ordering  << 
 46  * @v: pointer to atomic_long_t                << 
 47  *                                             << 
 48  * Atomically loads the value of @v with acquire ordering.  << 
 49  *                                             << 
 50  * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.  << 
 51  *                                             << 
 52  * Return: The value loaded from @v.           << 
 53  */                                            << 
 54 static __always_inline long                        32 static __always_inline long
 55 raw_atomic_long_read_acquire(const atomic_long_t *v)  !!  33 arch_atomic_long_read_acquire(const atomic_long_t *v)
 56 {                                                  34 {
 57 #ifdef CONFIG_64BIT                            !!  35         return arch_atomic64_read_acquire(v);
 58         return raw_atomic64_read_acquire(v);   !!  36 }
 59 #else                                          !!  37 
 60         return raw_atomic_read_acquire(v);     << 
 61 #endif                                         << 
 62 }                                              << 
 63                                                << 
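
The "Safe to use in noinstr code" wording in the new comments marks a division of labour: the raw_atomic_long_*() functions here bypass KASAN/KCSAN instrumentation and may therefore be called from noinstr sections, while ordinary kernel code should use the instrumented atomic_long_*() wrappers. A hedged sketch of the intended split (function names hypothetical):

        /* Ordinary driver code: instrumented wrapper. */
        static long sample_counter(atomic_long_t *ctr)
        {
                return atomic_long_read(ctr);
        }

        /* Early-entry code that must not be instrumented. */
        static noinstr long sample_counter_noinstr(atomic_long_t *ctr)
        {
                return raw_atomic_long_read(ctr);
        }
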
 64 /**                                            << 
 65  * raw_atomic_long_set() - atomic set with relaxed ordering  << 
 66  * @v: pointer to atomic_long_t                << 
 67  * @i: long value to assign                    << 
 68  *                                             << 
 69  * Atomically sets @v to @i with relaxed ordering.  << 
 70  *                                             << 
 71  * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.  << 
 72  *                                             << 
 73  * Return: Nothing.                            << 
 74  */                                            << 
 75 static __always_inline void                        38 static __always_inline void
 76 raw_atomic_long_set(atomic_long_t *v, long i)  !!  39 arch_atomic_long_set(atomic_long_t *v, long i)
 77 {                                                  40 {
 78 #ifdef CONFIG_64BIT                            !!  41         arch_atomic64_set(v, i);
 79         raw_atomic64_set(v, i);                !!  42 }
 80 #else                                          !!  43 
 81         raw_atomic_set(v, i);                  << 
 82 #endif                                         << 
 83 }                                              << 
 84                                                << 
 85 /**                                            << 
 86  * raw_atomic_long_set_release() - atomic set with release ordering  << 
 87  * @v: pointer to atomic_long_t                << 
 88  * @i: long value to assign                    << 
 89  *                                             << 
 90  * Atomically sets @v to @i with release ordering.  << 
 91  *                                             << 
 92  * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.  << 
 93  *                                             << 
 94  * Return: Nothing.                            << 
 95  */                                            << 
 96 static __always_inline void                        44 static __always_inline void
 97 raw_atomic_long_set_release(atomic_long_t *v, long i)  !!  45 arch_atomic_long_set_release(atomic_long_t *v, long i)
 98 {                                                  46 {
 99 #ifdef CONFIG_64BIT                            !!  47         arch_atomic64_set_release(v, i);
100         raw_atomic64_set_release(v, i);        !!  48 }
101 #else                                          !!  49 
102         raw_atomic_set_release(v, i);          << 
103 #endif                                         << 
104 }                                              << 
105                                                << 
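
read_acquire and set_release are designed to pair: every write made before the release store is visible to a reader that observes the flag through the acquire load. A minimal publish/consume sketch (payload and flag are hypothetical):

        static long payload;                            /* plain data, published below */
        static atomic_long_t published = ATOMIC_LONG_INIT(0);

        static void producer(void)
        {
                payload = 42;                           /* ordinary store */
                atomic_long_set_release(&published, 1); /* orders payload before the flag */
        }

        static void consumer(void)
        {
                if (atomic_long_read_acquire(&published))
                        BUG_ON(payload != 42);          /* acquire pairs with the release */
        }
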
106 /**                                            << 
107  * raw_atomic_long_add() - atomic add with relaxed ordering  << 
108  * @i: long value to add                       << 
109  * @v: pointer to atomic_long_t                << 
110  *                                             << 
111  * Atomically updates @v to (@v + @i) with relaxed ordering.  << 
112  *                                             << 
113  * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.  << 
114  *                                             << 
115  * Return: Nothing.                            << 
116  */                                            << 
117 static __always_inline void                        50 static __always_inline void
118 raw_atomic_long_add(long i, atomic_long_t *v)  !!  51 arch_atomic_long_add(long i, atomic_long_t *v)
119 {                                                  52 {
120 #ifdef CONFIG_64BIT                            !!  53         arch_atomic64_add(i, v);
121         raw_atomic64_add(i, v);                !!  54 }
122 #else                                          !!  55 
123         raw_atomic_add(i, v);                  << 
124 #endif                                         << 
125 }                                              << 
126                                                << 
127 /**                                            << 
128  * raw_atomic_long_add_return() - atomic add with full ordering  << 
129  * @i: long value to add                       << 
130  * @v: pointer to atomic_long_t                << 
131  *                                             << 
132  * Atomically updates @v to (@v + @i) with full ordering.  << 
133  *                                             << 
134  * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.  << 
135  *                                             << 
136  * Return: The updated value of @v.            << 
137  */                                            << 
138 static __always_inline long                        56 static __always_inline long
139 raw_atomic_long_add_return(long i, atomic_long_t *v)  !!  57 arch_atomic_long_add_return(long i, atomic_long_t *v)
140 {                                                  58 {
141 #ifdef CONFIG_64BIT                            !!  59         return arch_atomic64_add_return(i, v);
142         return raw_atomic64_add_return(i, v);  !!  60 }
143 #else                                          !!  61 
144         return raw_atomic_add_return(i, v);    << 
145 #endif                                         << 
146 }                                              << 
147                                                << 
148 /**                                            << 
149  * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering  << 
150  * @i: long value to add                       << 
151  * @v: pointer to atomic_long_t                << 
152  *                                             << 
153  * Atomically updates @v to (@v + @i) with acquire ordering.  << 
154  *                                             << 
155  * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.  << 
156  *                                             << 
157  * Return: The updated value of @v.            << 
158  */                                            << 
159 static __always_inline long                        62 static __always_inline long
160 raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)  !!  63 arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
161 {                                                  64 {
162 #ifdef CONFIG_64BIT                            !!  65         return arch_atomic64_add_return_acquire(i, v);
163         return raw_atomic64_add_return_acquire(i, v);  !!  66 }
164 #else                                          !!  67 
165         return raw_atomic_add_return_acquire(i, v);  << 
166 #endif                                         << 
167 }                                              << 
168                                                << 
169 /**                                            << 
170  * raw_atomic_long_add_return_release() - atomic add with release ordering  << 
171  * @i: long value to add                       << 
172  * @v: pointer to atomic_long_t                << 
173  *                                             << 
174  * Atomically updates @v to (@v + @i) with release ordering.  << 
175  *                                             << 
176  * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.  << 
177  *                                             << 
178  * Return: The updated value of @v.            << 
179  */                                            << 
180 static __always_inline long                        68 static __always_inline long
181 raw_atomic_long_add_return_release(long i, atomic_long_t *v)  !!  69 arch_atomic_long_add_return_release(long i, atomic_long_t *v)
182 {                                                  70 {
183 #ifdef CONFIG_64BIT                            !!  71         return arch_atomic64_add_return_release(i, v);
184         return raw_atomic64_add_return_release(i, v);  !!  72 }
185 #else                                          !!  73 
186         return raw_atomic_add_return_release(i, v);  << 
187 #endif                                         << 
188 }                                              << 
189                                                << 
190 /**                                            << 
191  * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering  << 
192  * @i: long value to add                       << 
193  * @v: pointer to atomic_long_t                << 
194  *                                             << 
195  * Atomically updates @v to (@v + @i) with relaxed ordering.  << 
196  *                                             << 
197  * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.  << 
198  *                                             << 
199  * Return: The updated value of @v.            << 
200  */                                            << 
201 static __always_inline long                        74 static __always_inline long
202 raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)  !!  75 arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
203 {                                                  76 {
204 #ifdef CONFIG_64BIT                            !!  77         return arch_atomic64_add_return_relaxed(i, v);
205         return raw_atomic64_add_return_relaxed(i, v);  !!  78 }
206 #else                                          !!  79 
207         return raw_atomic_add_return_relaxed(i, v);  << 
208 #endif                                         << 
209 }                                              << 
210                                                << 
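
The four add_return variants differ only in the implied barrier: no suffix means fully ordered, _acquire and _release order one direction each, and _relaxed promises atomicity only. For a plain statistics counter the relaxed form is enough (a sketch, names hypothetical):

        static atomic_long_t inflight = ATOMIC_LONG_INIT(0);

        static long request_start(void)
        {
                /* Only the count matters here, so skip the full barrier
                 * the unsuffixed atomic_long_add_return() would imply. */
                return atomic_long_add_return_relaxed(1, &inflight);
        }
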
211 /**                                            << 
212  * raw_atomic_long_fetch_add() - atomic add with full ordering  << 
213  * @i: long value to add                       << 
214  * @v: pointer to atomic_long_t                << 
215  *                                             << 
216  * Atomically updates @v to (@v + @i) with full ordering.  << 
217  *                                             << 
218  * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.  << 
219  *                                             << 
220  * Return: The original value of @v.           << 
221  */                                            << 
222 static __always_inline long                        80 static __always_inline long
223 raw_atomic_long_fetch_add(long i, atomic_long_t *v)  !!  81 arch_atomic_long_fetch_add(long i, atomic_long_t *v)
224 {                                                  82 {
225 #ifdef CONFIG_64BIT                            !!  83         return arch_atomic64_fetch_add(i, v);
226         return raw_atomic64_fetch_add(i, v);   !!  84 }
227 #else                                          !!  85 
228         return raw_atomic_fetch_add(i, v);     << 
229 #endif                                         << 
230 }                                              << 
231                                                << 
232 /**                                            << 
233  * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering  << 
234  * @i: long value to add                       << 
235  * @v: pointer to atomic_long_t                << 
236  *                                             << 
237  * Atomically updates @v to (@v + @i) with acquire ordering.  << 
238  *                                             << 
239  * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.  << 
240  *                                             << 
241  * Return: The original value of @v.           << 
242  */                                            << 
243 static __always_inline long                        86 static __always_inline long
244 raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)  !!  87 arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
245 {                                                  88 {
246 #ifdef CONFIG_64BIT                            !!  89         return arch_atomic64_fetch_add_acquire(i, v);
247         return raw_atomic64_fetch_add_acquire(i, v);  !!  90 }
248 #else                                          !!  91 
249         return raw_atomic_fetch_add_acquire(i, v);  << 
250 #endif                                         << 
251 }                                              << 
252                                                << 
253 /**                                            << 
254  * raw_atomic_long_fetch_add_release() - atomic add with release ordering  << 
255  * @i: long value to add                       << 
256  * @v: pointer to atomic_long_t                << 
257  *                                             << 
258  * Atomically updates @v to (@v + @i) with release ordering.  << 
259  *                                             << 
260  * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.  << 
261  *                                             << 
262  * Return: The original value of @v.           << 
263  */                                            << 
264 static __always_inline long                        92 static __always_inline long
265 raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)  !!  93 arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
266 {                                                  94 {
267 #ifdef CONFIG_64BIT                            !!  95         return arch_atomic64_fetch_add_release(i, v);
268         return raw_atomic64_fetch_add_release(i, v);  !!  96 }
269 #else                                          !!  97 
270         return raw_atomic_fetch_add_release(i, v);  << 
271 #endif                                         << 
272 }                                              << 
273                                                << 
274 /**                                            << 
275  * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering  << 
276  * @i: long value to add                       << 
277  * @v: pointer to atomic_long_t                << 
278  *                                             << 
279  * Atomically updates @v to (@v + @i) with relaxed ordering.  << 
280  *                                             << 
281  * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.  << 
282  *                                             << 
283  * Return: The original value of @v.           << 
284  */                                            << 
285 static __always_inline long                        98 static __always_inline long
286 raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)  !!  99 arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
287 {                                                 100 {
288 #ifdef CONFIG_64BIT                            !! 101         return arch_atomic64_fetch_add_relaxed(i, v);
289         return raw_atomic64_fetch_add_relaxed(i, v);  !! 102 }
290 #else                                          !! 103 
291         return raw_atomic_fetch_add_relaxed(i, v);  << 
292 #endif                                         << 
293 }                                              << 
294                                                << 
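
fetch_add differs from add_return only in which value it hands back: the pre-addition value rather than the post-addition one. That makes it a natural unique-ID allocator (a sketch, names hypothetical):

        static atomic_long_t next_id = ATOMIC_LONG_INIT(0);

        static long alloc_id(void)
        {
                /* Returns the value *before* the +1, so concurrent
                 * callers each observe a distinct id. */
                return atomic_long_fetch_add(1, &next_id);
        }
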
295 /**                                            << 
296  * raw_atomic_long_sub() - atomic subtract with relaxed ordering  << 
297  * @i: long value to subtract                  << 
298  * @v: pointer to atomic_long_t                << 
299  *                                             << 
300  * Atomically updates @v to (@v - @i) with relaxed ordering.  << 
301  *                                             << 
302  * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.  << 
303  *                                             << 
304  * Return: Nothing.                            << 
305  */                                            << 
306 static __always_inline void                       104 static __always_inline void
307 raw_atomic_long_sub(long i, atomic_long_t *v)  !! 105 arch_atomic_long_sub(long i, atomic_long_t *v)
308 {                                                 106 {
309 #ifdef CONFIG_64BIT                            !! 107         arch_atomic64_sub(i, v);
310         raw_atomic64_sub(i, v);                !! 108 }
311 #else                                          !! 109 
312         raw_atomic_sub(i, v);                  << 
313 #endif                                         << 
314 }                                              << 
315                                                << 
316 /**                                            << 
317  * raw_atomic_long_sub_return() - atomic subtract with full ordering  << 
318  * @i: long value to subtract                  << 
319  * @v: pointer to atomic_long_t                << 
320  *                                             << 
321  * Atomically updates @v to (@v - @i) with full ordering.  << 
322  *                                             << 
323  * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.  << 
324  *                                             << 
325  * Return: The updated value of @v.            << 
326  */                                            << 
327 static __always_inline long                       110 static __always_inline long
328 raw_atomic_long_sub_return(long i, atomic_long_t *v)  !! 111 arch_atomic_long_sub_return(long i, atomic_long_t *v)
329 {                                                 112 {
330 #ifdef CONFIG_64BIT                            !! 113         return arch_atomic64_sub_return(i, v);
331         return raw_atomic64_sub_return(i, v);  !! 114 }
332 #else                                          !! 115 
333         return raw_atomic_sub_return(i, v);    << 
334 #endif                                         << 
335 }                                              << 
336                                                << 
337 /**                                            << 
338  * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering  << 
339  * @i: long value to subtract                  << 
340  * @v: pointer to atomic_long_t                << 
341  *                                             << 
342  * Atomically updates @v to (@v - @i) with acquire ordering.  << 
343  *                                             << 
344  * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.  << 
345  *                                             << 
346  * Return: The updated value of @v.            << 
347  */                                            << 
348 static __always_inline long                       116 static __always_inline long
349 raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)  !! 117 arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
350 {                                                 118 {
351 #ifdef CONFIG_64BIT                            !! 119         return arch_atomic64_sub_return_acquire(i, v);
352         return raw_atomic64_sub_return_acquire(i, v);  !! 120 }
353 #else                                          !! 121 
354         return raw_atomic_sub_return_acquire(i, v);  << 
355 #endif                                         << 
356 }                                              << 
357                                                << 
358 /**                                            << 
359  * raw_atomic_long_sub_return_release() - atomic subtract with release ordering  << 
360  * @i: long value to subtract                  << 
361  * @v: pointer to atomic_long_t                << 
362  *                                             << 
363  * Atomically updates @v to (@v - @i) with release ordering.  << 
364  *                                             << 
365  * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.  << 
366  *                                             << 
367  * Return: The updated value of @v.            << 
368  */                                            << 
369 static __always_inline long                       122 static __always_inline long
370 raw_atomic_long_sub_return_release(long i, atomic_long_t *v)  !! 123 arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
371 {                                                 124 {
372 #ifdef CONFIG_64BIT                            !! 125         return arch_atomic64_sub_return_release(i, v);
373         return raw_atomic64_sub_return_release(i, v);  !! 126 }
374 #else                                          !! 127 
375         return raw_atomic_sub_return_release(i, v);  << 
376 #endif                                         << 
377 }                                              << 
378                                                << 
379 /**                                            << 
380  * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering  << 
381  * @i: long value to subtract                  << 
382  * @v: pointer to atomic_long_t                << 
383  *                                             << 
384  * Atomically updates @v to (@v - @i) with relaxed ordering.  << 
385  *                                             << 
386  * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.  << 
387  *                                             << 
388  * Return: The updated value of @v.            << 
389  */                                            << 
390 static __always_inline long                       128 static __always_inline long
391 raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)  !! 129 arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
392 {                                                 130 {
393 #ifdef CONFIG_64BIT                            !! 131         return arch_atomic64_sub_return_relaxed(i, v);
394         return raw_atomic64_sub_return_relaxed(i, v);  !! 132 }
395 #else                                          !! 133 
396         return raw_atomic_sub_return_relaxed(i, v);  << 
397 #endif                                         << 
398 }                                              << 
399                                                << 
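
sub_return reports the post-subtraction value with full ordering, which supports the usual charge-and-test idiom (a sketch; the quota object is hypothetical):

        static atomic_long_t quota = ATOMIC_LONG_INIT(100);

        static bool charge(long units)
        {
                /* sub_return yields the value after the subtraction; a
                 * negative result means the charge overshot and is undone. */
                if (atomic_long_sub_return(units, &quota) < 0) {
                        atomic_long_add(units, &quota);
                        return false;
                }
                return true;
        }
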
400 /**                                            << 
401  * raw_atomic_long_fetch_sub() - atomic subtract with full ordering  << 
402  * @i: long value to subtract                  << 
403  * @v: pointer to atomic_long_t                << 
404  *                                             << 
405  * Atomically updates @v to (@v - @i) with full ordering.  << 
406  *                                             << 
407  * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.  << 
408  *                                             << 
409  * Return: The original value of @v.           << 
410  */                                            << 
411 static __always_inline long                       134 static __always_inline long
412 raw_atomic_long_fetch_sub(long i, atomic_long_t *v)  !! 135 arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
413 {                                                 136 {
414 #ifdef CONFIG_64BIT                            !! 137         return arch_atomic64_fetch_sub(i, v);
415         return raw_atomic64_fetch_sub(i, v);   !! 138 }
416 #else                                          !! 139 
417         return raw_atomic_fetch_sub(i, v);     << 
418 #endif                                         << 
419 }                                              << 
420                                                << 
421 /**                                            << 
422  * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering  << 
423  * @i: long value to subtract                  << 
424  * @v: pointer to atomic_long_t                << 
425  *                                             << 
426  * Atomically updates @v to (@v - @i) with acquire ordering.  << 
427  *                                             << 
428  * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.  << 
429  *                                             << 
430  * Return: The original value of @v.           << 
431  */                                            << 
432 static __always_inline long                       140 static __always_inline long
433 raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)  !! 141 arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
434 {                                                 142 {
435 #ifdef CONFIG_64BIT                            !! 143         return arch_atomic64_fetch_sub_acquire(i, v);
436         return raw_atomic64_fetch_sub_acquire(i, v);  !! 144 }
437 #else                                          !! 145 
438         return raw_atomic_fetch_sub_acquire(i, v);  << 
439 #endif                                         << 
440 }                                              << 
441                                                << 
442 /**                                            << 
443  * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering  << 
444  * @i: long value to subtract                  << 
445  * @v: pointer to atomic_long_t                << 
446  *                                             << 
447  * Atomically updates @v to (@v - @i) with release ordering.  << 
448  *                                             << 
449  * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.  << 
450  *                                             << 
451  * Return: The original value of @v.           << 
452  */                                            << 
453 static __always_inline long                       146 static __always_inline long
454 raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)  !! 147 arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
455 {                                                 148 {
456 #ifdef CONFIG_64BIT                            !! 149         return arch_atomic64_fetch_sub_release(i, v);
457         return raw_atomic64_fetch_sub_release(i, v);  !! 150 }
458 #else                                          !! 151 
459         return raw_atomic_fetch_sub_release(i, v);  << 
460 #endif                                         << 
461 }                                              << 
462                                                << 
463 /**                                            << 
464  * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering  << 
465  * @i: long value to subtract                  << 
466  * @v: pointer to atomic_long_t                << 
467  *                                             << 
468  * Atomically updates @v to (@v - @i) with relaxed ordering.  << 
469  *                                             << 
470  * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.  << 
471  *                                             << 
472  * Return: The original value of @v.           << 
473  */                                            << 
474 static __always_inline long                       152 static __always_inline long
475 raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)  !! 153 arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
476 {                                                 154 {
477 #ifdef CONFIG_64BIT                            !! 155         return arch_atomic64_fetch_sub_relaxed(i, v);
478         return raw_atomic64_fetch_sub_relaxed(i, v);  !! 156 }
479 #else                                          !! 157 
480         return raw_atomic_fetch_sub_relaxed(i, v);  << 
481 #endif                                         << 
482 }                                              << 
483                                                << 
484 /**                                            << 
485  * raw_atomic_long_inc() - atomic increment with relaxed ordering  << 
486  * @v: pointer to atomic_long_t                << 
487  *                                             << 
488  * Atomically updates @v to (@v + 1) with relaxed ordering.  << 
489  *                                             << 
490  * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.  << 
491  *                                             << 
492  * Return: Nothing.                            << 
493  */                                            << 
494 static __always_inline void                       158 static __always_inline void
495 raw_atomic_long_inc(atomic_long_t *v)          !! 159 arch_atomic_long_inc(atomic_long_t *v)
496 {                                                 160 {
497 #ifdef CONFIG_64BIT                            !! 161         arch_atomic64_inc(v);
498         raw_atomic64_inc(v);                   !! 162 }
499 #else                                          !! 163 
500         raw_atomic_inc(v);                     << 
501 #endif                                         << 
502 }                                              << 
503                                                << 
504 /**                                            << 
505  * raw_atomic_long_inc_return() - atomic increment with full ordering  << 
506  * @v: pointer to atomic_long_t                << 
507  *                                             << 
508  * Atomically updates @v to (@v + 1) with full ordering.  << 
509  *                                             << 
510  * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.  << 
511  *                                             << 
512  * Return: The updated value of @v.            << 
513  */                                            << 
514 static __always_inline long                       164 static __always_inline long
515 raw_atomic_long_inc_return(atomic_long_t *v)   !! 165 arch_atomic_long_inc_return(atomic_long_t *v)
516 {                                                 166 {
517 #ifdef CONFIG_64BIT                            !! 167         return arch_atomic64_inc_return(v);
518         return raw_atomic64_inc_return(v);     !! 168 }
519 #else                                          !! 169 
520         return raw_atomic_inc_return(v);       << 
521 #endif                                         << 
522 }                                              << 
523                                                << 
524 /**                                            << 
525  * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering  << 
526  * @v: pointer to atomic_long_t                << 
527  *                                             << 
528  * Atomically updates @v to (@v + 1) with acquire ordering.  << 
529  *                                             << 
530  * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.  << 
531  *                                             << 
532  * Return: The updated value of @v.            << 
533  */                                            << 
534 static __always_inline long                       170 static __always_inline long
535 raw_atomic_long_inc_return_acquire(atomic_long_t *v)  !! 171 arch_atomic_long_inc_return_acquire(atomic_long_t *v)
536 {                                                 172 {
537 #ifdef CONFIG_64BIT                            !! 173         return arch_atomic64_inc_return_acquire(v);
538         return raw_atomic64_inc_return_acquire(v);  !! 174 }
539 #else                                          !! 175 
540         return raw_atomic_inc_return_acquire(v);  << 
541 #endif                                         << 
542 }                                              << 
543                                                << 
544 /**                                            << 
545  * raw_atomic_long_inc_return_release() - atomic increment with release ordering  << 
546  * @v: pointer to atomic_long_t                << 
547  *                                             << 
548  * Atomically updates @v to (@v + 1) with release ordering.  << 
549  *                                             << 
550  * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.  << 
551  *                                             << 
552  * Return: The updated value of @v.            << 
553  */                                            << 
554 static __always_inline long                       176 static __always_inline long
555 raw_atomic_long_inc_return_release(atomic_long_t *v)  !! 177 arch_atomic_long_inc_return_release(atomic_long_t *v)
556 {                                                 178 {
557 #ifdef CONFIG_64BIT                            !! 179         return arch_atomic64_inc_return_release(v);
558         return raw_atomic64_inc_return_release(v);  !! 180 }
559 #else                                          !! 181 
560         return raw_atomic_inc_return_release(v);  << 
561 #endif                                         << 
562 }                                              << 
563                                                << 
564 /**                                            << 
565  * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering  << 
566  * @v: pointer to atomic_long_t                << 
567  *                                             << 
568  * Atomically updates @v to (@v + 1) with relaxed ordering.  << 
569  *                                             << 
570  * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.  << 
571  *                                             << 
572  * Return: The updated value of @v.            << 
573  */                                            << 
574 static __always_inline long                       182 static __always_inline long
575 raw_atomic_long_inc_return_relaxed(atomic_long_t *v)  !! 183 arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
576 {                                                 184 {
577 #ifdef CONFIG_64BIT                            !! 185         return arch_atomic64_inc_return_relaxed(v);
578         return raw_atomic64_inc_return_relaxed(v);  !! 186 }
579 #else                                          !! 187 
580         return raw_atomic_inc_return_relaxed(v);  << 
581 #endif                                         << 
582 }                                              << 
583                                                << 
584 /**                                            << 
585  * raw_atomic_long_fetch_inc() - atomic increment with full ordering  << 
586  * @v: pointer to atomic_long_t                << 
587  *                                             << 
588  * Atomically updates @v to (@v + 1) with full ordering.  << 
589  *                                             << 
590  * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.  << 
591  *                                             << 
592  * Return: The original value of @v.           << 
593  */                                            << 
594 static __always_inline long                       188 static __always_inline long
595 raw_atomic_long_fetch_inc(atomic_long_t *v)    !! 189 arch_atomic_long_fetch_inc(atomic_long_t *v)
596 {                                                 190 {
597 #ifdef CONFIG_64BIT                            !! 191         return arch_atomic64_fetch_inc(v);
598         return raw_atomic64_fetch_inc(v);      !! 192 }
599 #else                                          !! 193 
600         return raw_atomic_fetch_inc(v);        << 
601 #endif                                         << 
602 }                                              << 
603                                                << 
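
The naming convention separates the two possible return values of an increment: fetch_inc gives the value before the update, inc_return the value after it. A throwaway illustration (seq is hypothetical):

        static atomic_long_t seq = ATOMIC_LONG_INIT(0);

        static void tick_twice(void)
        {
                long before = atomic_long_fetch_inc(&seq); /* old value; seq is now before + 1 */
                long after = atomic_long_inc_return(&seq); /* new value after a second increment */

                WARN_ON(after != before + 2);
        }
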
604 /**                                            << 
605  * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering  << 
606  * @v: pointer to atomic_long_t                << 
607  *                                             << 
608  * Atomically updates @v to (@v + 1) with acquire ordering.  << 
609  *                                             << 
610  * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.  << 
611  *                                             << 
612  * Return: The original value of @v.           << 
613  */                                            << 
614 static __always_inline long                       194 static __always_inline long
615 raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)  !! 195 arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
616 {                                                 196 {
617 #ifdef CONFIG_64BIT                            !! 197         return arch_atomic64_fetch_inc_acquire(v);
618         return raw_atomic64_fetch_inc_acquire(v);  !! 198 }
619 #else                                          !! 199 
620         return raw_atomic_fetch_inc_acquire(v);  << 
621 #endif                                         << 
622 }                                              << 
623                                                << 
624 /**                                            << 
625  * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering  << 
626  * @v: pointer to atomic_long_t                << 
627  *                                             << 
628  * Atomically updates @v to (@v + 1) with release ordering.  << 
629  *                                             << 
630  * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.  << 
631  *                                             << 
632  * Return: The original value of @v.           << 
633  */                                            << 
634 static __always_inline long                       200 static __always_inline long
635 raw_atomic_long_fetch_inc_release(atomic_long_t *v)  !! 201 arch_atomic_long_fetch_inc_release(atomic_long_t *v)
636 {                                                 202 {
637 #ifdef CONFIG_64BIT                            !! 203         return arch_atomic64_fetch_inc_release(v);
638         return raw_atomic64_fetch_inc_release(v);  !! 204 }
639 #else                                          !! 205 
640         return raw_atomic_fetch_inc_release(v);  << 
641 #endif                                         << 
642 }                                              << 
643                                                << 
644 /**                                            << 
645  * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering  << 
646  * @v: pointer to atomic_long_t                << 
647  *                                             << 
648  * Atomically updates @v to (@v + 1) with relaxed ordering.  << 
649  *                                             << 
650  * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.  << 
651  *                                             << 
652  * Return: The original value of @v.           << 
653  */                                            << 
654 static __always_inline long                       206 static __always_inline long
655 raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)  !! 207 arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
656 {                                                 208 {
657 #ifdef CONFIG_64BIT                            !! 209         return arch_atomic64_fetch_inc_relaxed(v);
658         return raw_atomic64_fetch_inc_relaxed(v);  !! 210 }
659 #else                                          !! 211 
660         return raw_atomic_fetch_inc_relaxed(v);  << 
661 #endif                                         << 
662 }                                              << 
663                                                << 
664 /**                                            << 
665  * raw_atomic_long_dec() - atomic decrement with relaxed ordering  << 
666  * @v: pointer to atomic_long_t                << 
667  *                                             << 
668  * Atomically updates @v to (@v - 1) with relaxed ordering.  << 
669  *                                             << 
670  * Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.  << 
671  *                                             << 
672  * Return: Nothing.                            << 
673  */                                            << 
674 static __always_inline void                       212 static __always_inline void
675 raw_atomic_long_dec(atomic_long_t *v)          !! 213 arch_atomic_long_dec(atomic_long_t *v)
676 {                                                 214 {
677 #ifdef CONFIG_64BIT                            !! 215         arch_atomic64_dec(v);
678         raw_atomic64_dec(v);                   !! 216 }
679 #else                                          !! 217 
680         raw_atomic_dec(v);                     << 
681 #endif                                         << 
682 }                                              << 
683                                                << 
684 /**                                            << 
685  * raw_atomic_long_dec_return() - atomic decrement with full ordering  << 
686  * @v: pointer to atomic_long_t                << 
687  *                                             << 
688  * Atomically updates @v to (@v - 1) with full ordering.  << 
689  *                                             << 
690  * Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.  << 
691  *                                             << 
692  * Return: The updated value of @v.            << 
693  */                                            << 
694 static __always_inline long                       218 static __always_inline long
695 raw_atomic_long_dec_return(atomic_long_t *v)   !! 219 arch_atomic_long_dec_return(atomic_long_t *v)
696 {                                                 220 {
697 #ifdef CONFIG_64BIT                            !! 221         return arch_atomic64_dec_return(v);
698         return raw_atomic64_dec_return(v);     !! 222 }
699 #else                                          !! 223 
700         return raw_atomic_dec_return(v);       << 
701 #endif                                         << 
702 }                                              << 
703                                                << 
704 /**                                            << 
705  * raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering  << 
706  * @v: pointer to atomic_long_t                << 
707  *                                             << 
708  * Atomically updates @v to (@v - 1) with acquire ordering.  << 
709  *                                             << 
710  * Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.  << 
711  *                                             << 
712  * Return: The updated value of @v.            << 
713  */                                            << 
714 static __always_inline long                       224 static __always_inline long
715 raw_atomic_long_dec_return_acquire(atomic_long_t *v)  !! 225 arch_atomic_long_dec_return_acquire(atomic_long_t *v)
716 {                                                 226 {
717 #ifdef CONFIG_64BIT                            !! 227         return arch_atomic64_dec_return_acquire(v);
718         return raw_atomic64_dec_return_acquire(v);  !! 228 }
719 #else                                          !! 229 
720         return raw_atomic_dec_return_acquire(v);  << 
721 #endif                                         << 
722 }                                              << 
723                                                << 
724 /**                                            << 
725  * raw_atomic_long_dec_return_release() - atomic decrement with release ordering  << 
726  * @v: pointer to atomic_long_t                << 
727  *                                             << 
728  * Atomically updates @v to (@v - 1) with release ordering.  << 
729  *                                             << 
730  * Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.  << 
731  *                                             << 
732  * Return: The updated value of @v.            << 
733  */                                            << 
734 static __always_inline long                       230 static __always_inline long
735 raw_atomic_long_dec_return_release(atomic_long_t *v)  !! 231 arch_atomic_long_dec_return_release(atomic_long_t *v)
736 {                                                 232 {
737 #ifdef CONFIG_64BIT                            !! 233         return arch_atomic64_dec_return_release(v);
738         return raw_atomic64_dec_return_release(v);  !! 234 }
739 #else                                          !! 235 
740         return raw_atomic_dec_return_release(v);  << 
741 #endif                                         << 
742 }                                              << 
743                                                << 
744 /**                                            << 
745  * raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering  << 
746  * @v: pointer to atomic_long_t                << 
747  *                                             << 
748  * Atomically updates @v to (@v - 1) with relaxed ordering.  << 
749  *                                             << 
750  * Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.  << 
751  *                                             << 
752  * Return: The updated value of @v.            << 
753  */                                            << 
754 static __always_inline long                       236 static __always_inline long
755 raw_atomic_long_dec_return_relaxed(atomic_long_t *v)  !! 237 arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
756 {                                                 238 {
757 #ifdef CONFIG_64BIT                            !! 239         return arch_atomic64_dec_return_relaxed(v);
758         return raw_atomic64_dec_return_relaxed(v);  !! 240 }
759 #else                                          !! 241 
760         return raw_atomic_dec_return_relaxed(v);  << 
761 #endif                                         << 
762 }                                              << 
763                                                << 
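
The ordered dec_return variants are typically wanted when the thread that drops the last reference must have its earlier writes completed first; real reference counting should normally use refcount_t, so the following is only an ordering sketch (free_object() is hypothetical):

        static atomic_long_t holders = ATOMIC_LONG_INIT(1);

        static void put_holder(void)
        {
                /* The release orders this thread's prior stores before
                 * the decrement that may hand the object to the freeing
                 * path. */
                if (atomic_long_dec_return_release(&holders) == 0)
                        free_object();
        }
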
764 /**                                            << 
765  * raw_atomic_long_fetch_dec() - atomic decrement with full ordering  << 
766  * @v: pointer to atomic_long_t                << 
767  *                                             << 
768  * Atomically updates @v to (@v - 1) with full ordering.  << 
769  *                                             << 
770  * Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.  << 
771  *                                             << 
772  * Return: The original value of @v.           << 
773  */                                            << 
774 static __always_inline long                       242 static __always_inline long
775 raw_atomic_long_fetch_dec(atomic_long_t *v)    !! 243 arch_atomic_long_fetch_dec(atomic_long_t *v)
776 {                                                 244 {
777 #ifdef CONFIG_64BIT                            !! 245         return arch_atomic64_fetch_dec(v);
778         return raw_atomic64_fetch_dec(v);      !! 246 }
779 #else                                          !! 247 
780         return raw_atomic_fetch_dec(v);        << 
781 #endif                                         << 
782 }                                              << 
783                                                << 
784 /**                                            << 
785  * raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering  << 
786  * @v: pointer to atomic_long_t                << 
787  *                                             << 
788  * Atomically updates @v to (@v - 1) with acquire ordering.  << 
789  *                                             << 
790  * Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.  << 
791  *                                             << 
792  * Return: The original value of @v.           << 
793  */                                            << 
794 static __always_inline long                       248 static __always_inline long
795 raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)  !! 249 arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
796 {                                                 250 {
797 #ifdef CONFIG_64BIT                            !! 251         return arch_atomic64_fetch_dec_acquire(v);
798         return raw_atomic64_fetch_dec_acquire(v);  !! 252 }
799 #else                                          !! 253 
800         return raw_atomic_fetch_dec_acquire(v);  << 
801 #endif                                         << 
802 }                                              << 
803                                                << 
804 /**                                            << 
805  * raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering  << 
806  * @v: pointer to atomic_long_t                << 
807  *                                             << 
808  * Atomically updates @v to (@v - 1) with release ordering.  << 
809  *                                             << 
810  * Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.  << 
811  *                                             << 
812  * Return: The original value of @v.           << 
813  */                                            << 
814 static __always_inline long                       254 static __always_inline long
815 raw_atomic_long_fetch_dec_release(atomic_long_t *v)  !! 255 arch_atomic_long_fetch_dec_release(atomic_long_t *v)
816 {                                                 256 {
817 #ifdef CONFIG_64BIT                            !! 257         return arch_atomic64_fetch_dec_release(v);
818         return raw_atomic64_fetch_dec_release(v);  !! 258 }
819 #else                                          !! 259 
820         return raw_atomic_fetch_dec_release(v);  << 
821 #endif                                         << 
822 }                                              << 
823                                                << 
824 /**                                            << 
825  * raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering  << 
826  * @v: pointer to atomic_long_t                << 
827  *                                             << 
828  * Atomically updates @v to (@v - 1) with relaxed ordering.  << 
829  *                                             << 
830  * Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.  << 
831  *                                             << 
832  * Return: The original value of @v.           << 
833  */                                            << 
834 static __always_inline long                       260 static __always_inline long
835 raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)  !! 261 arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
836 {                                                 262 {
837 #ifdef CONFIG_64BIT                            !! 263         return arch_atomic64_fetch_dec_relaxed(v);
838         return raw_atomic64_fetch_dec_relaxed(v);  !! 264 }
839 #else                                          !! 265 
840         return raw_atomic_fetch_dec_relaxed(v);  << 
841 #endif                                         << 
842 }                                              << 
843                                                << 
844 /**                                            << 
845  * raw_atomic_long_and() - atomic bitwise AND with relaxed ordering  << 
846  * @i: long value                              << 
847  * @v: pointer to atomic_long_t                << 
848  *                                             << 
849  * Atomically updates @v to (@v & @i) with relaxed ordering.  << 
850  *                                             << 
851  * Safe to use in noinstr code; prefer atomic_long_and() elsewhere.  << 
852  *                                             << 
853  * Return: Nothing.                            << 
854  */                                            << 
855 static __always_inline void                       266 static __always_inline void
856 raw_atomic_long_and(long i, atomic_long_t *v)  !! 267 arch_atomic_long_and(long i, atomic_long_t *v)
857 {                                                 268 {
858 #ifdef CONFIG_64BIT                            !! 269         arch_atomic64_and(i, v);
859         raw_atomic64_and(i, v);                !! 270 }
860 #else                                          !! 271 
861         raw_atomic_and(i, v);                  << 
862 #endif                                         << 
863 }                                              << 
864                                                << 
865 /**                                            << 
866  * raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering  << 
867  * @i: long value                              << 
868  * @v: pointer to atomic_long_t                << 
869  *                                             << 
870  * Atomically updates @v to (@v & @i) with full ordering.  << 
871  *                                             << 
872  * Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.  << 
873  *                                             << 
874  * Return: The original value of @v.           << 
875  */                                            << 
876 static __always_inline long                       272 static __always_inline long
877 raw_atomic_long_fetch_and(long i, atomic_long_t *v)  !! 273 arch_atomic_long_fetch_and(long i, atomic_long_t *v)
878 {                                                 274 {
879 #ifdef CONFIG_64BIT                            !! 275         return arch_atomic64_fetch_and(i, v);
880         return raw_atomic64_fetch_and(i, v);   !! 276 }
881 #else                                          !! 277 
882         return raw_atomic_fetch_and(i, v);     << 
883 #endif                                         << 
884 }                                              << 
885                                                << 
886 /**                                            << 
887  * raw_atomic_long_fetch_and_acquire() - atomi << 
888  * @i: long value                              << 
889  * @v: pointer to atomic_long_t                << 
890  *                                             << 
891  * Atomically updates @v to (@v & @i) with acq << 
892  *                                             << 
893  * Safe to use in noinstr code; prefer atomic_ << 
894  *                                             << 
895  * Return: The original value of @v.           << 
896  */                                            << 
897 static __always_inline long                       278 static __always_inline long
898 raw_atomic_long_fetch_and_acquire(long i, atom !! 279 arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
899 {                                                 280 {
900 #ifdef CONFIG_64BIT                            !! 281         return arch_atomic64_fetch_and_acquire(i, v);
901         return raw_atomic64_fetch_and_acquire( !! 282 }
902 #else                                          !! 283 
903         return raw_atomic_fetch_and_acquire(i, << 
904 #endif                                         << 
905 }                                              << 
906                                                << 
907 /**                                            << 
908  * raw_atomic_long_fetch_and_release() - atomi << 
909  * @i: long value                              << 
910  * @v: pointer to atomic_long_t                << 
911  *                                             << 
912  * Atomically updates @v to (@v & @i) with rel << 
913  *                                             << 
914  * Safe to use in noinstr code; prefer atomic_ << 
915  *                                             << 
916  * Return: The original value of @v.           << 
917  */                                            << 
918 static __always_inline long                       284 static __always_inline long
919 raw_atomic_long_fetch_and_release(long i, atom !! 285 arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
920 {                                                 286 {
921 #ifdef CONFIG_64BIT                            !! 287         return arch_atomic64_fetch_and_release(i, v);
922         return raw_atomic64_fetch_and_release( !! 288 }
923 #else                                          !! 289 
924         return raw_atomic_fetch_and_release(i, << 
925 #endif                                         << 
926 }                                              << 
927                                                << 
928 /**                                            << 
929  * raw_atomic_long_fetch_and_relaxed() - atomi << 
930  * @i: long value                              << 
931  * @v: pointer to atomic_long_t                << 
932  *                                             << 
933  * Atomically updates @v to (@v & @i) with rel << 
934  *                                             << 
935  * Safe to use in noinstr code; prefer atomic_ << 
936  *                                             << 
937  * Return: The original value of @v.           << 
938  */                                            << 
939 static __always_inline long                       290 static __always_inline long
940 raw_atomic_long_fetch_and_relaxed(long i, atom !! 291 arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
941 {                                                 292 {
942 #ifdef CONFIG_64BIT                            !! 293         return arch_atomic64_fetch_and_relaxed(i, v);
943         return raw_atomic64_fetch_and_relaxed( !! 294 }
944 #else                                          !! 295 
945         return raw_atomic_fetch_and_relaxed(i, << 
946 #endif                                         << 
947 }                                              << 
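
/*
 * Illustrative usage sketch, not part of the generated API: atomically
 * clearing a flag bit and observing whether it was previously set. The
 * names MY_FLAG_PENDING and flags_clear_pending() are hypothetical.
 *
 *      #define MY_FLAG_PENDING 0x1L
 *
 *      static bool flags_clear_pending(atomic_long_t *flags)
 *      {
 *              long old = raw_atomic_long_fetch_and(~MY_FLAG_PENDING, flags);
 *
 *              // True if the bit was set before we cleared it.
 *              return old & MY_FLAG_PENDING;
 *      }
 */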

/**
 * raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_andnot(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        raw_atomic64_andnot(i, v);
#else
        raw_atomic_andnot(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_andnot(i, v);
#else
        return raw_atomic_fetch_andnot(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_andnot_acquire(i, v);
#else
        return raw_atomic_fetch_andnot_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_andnot_release(i, v);
#else
        return raw_atomic_fetch_andnot_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_andnot_relaxed(i, v);
#else
        return raw_atomic_fetch_andnot_relaxed(i, v);
#endif
}
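
/*
 * Illustrative usage sketch, not part of the generated API: andnot clears
 * exactly the bits passed in @i, which avoids the explicit complement that
 * the fetch_and() version above needs. MY_FLAG_PENDING is hypothetical.
 *
 *      static bool flags_clear_pending_andnot(atomic_long_t *flags)
 *      {
 *              long old = raw_atomic_long_fetch_andnot(MY_FLAG_PENDING, flags);
 *
 *              return old & MY_FLAG_PENDING;
 *      }
 */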

/**
 * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_or(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        raw_atomic64_or(i, v);
#else
        raw_atomic_or(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or() - atomic bitwise OR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_or(i, v);
#else
        return raw_atomic_fetch_or(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_or_acquire(i, v);
#else
        return raw_atomic_fetch_or_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_or_release(i, v);
#else
        return raw_atomic_fetch_or_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_or_relaxed(i, v);
#else
        return raw_atomic_fetch_or_relaxed(i, v);
#endif
}
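
/*
 * Illustrative usage sketch, not part of the generated API: fetch_or() as a
 * test-and-set on one bit -- only the caller that finds the bit previously
 * clear wins the claim. MY_FLAG_QUEUED and claim_work() are hypothetical.
 *
 *      #define MY_FLAG_QUEUED 0x4L
 *
 *      static bool claim_work(atomic_long_t *flags)
 *      {
 *              // True only for the first caller to set the bit.
 *              return !(raw_atomic_long_fetch_or(MY_FLAG_QUEUED, flags) & MY_FLAG_QUEUED);
 *      }
 */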

/**
 * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        raw_atomic64_xor(i, v);
#else
        raw_atomic_xor(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_xor(i, v);
#else
        return raw_atomic_fetch_xor(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_xor_acquire(i, v);
#else
        return raw_atomic_fetch_xor_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_xor_release(i, v);
#else
        return raw_atomic_fetch_xor_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_xor_relaxed(i, v);
#else
        return raw_atomic_fetch_xor_relaxed(i, v);
#endif
}
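
/*
 * Illustrative usage sketch, not part of the generated API: atomically
 * toggling a bit; the plain xor() variant gives relaxed ordering only.
 * MY_FLAG_PARITY is hypothetical.
 *
 *      raw_atomic_long_xor(MY_FLAG_PARITY, &state);   // flip one bit
 */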

/**
 * raw_atomic_long_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_xchg(v, new);
#else
        return raw_atomic_xchg(v, new);
#endif
}

/**
 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_xchg_acquire(v, new);
#else
        return raw_atomic_xchg_acquire(v, new);
#endif
}

/**
 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_xchg_release(v, new);
#else
        return raw_atomic_xchg_release(v, new);
#endif
}

/**
 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_xchg_relaxed(v, new);
#else
        return raw_atomic_xchg_relaxed(v, new);
#endif
}
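
/*
 * Illustrative usage sketch, not part of the generated API: a read-and-reset
 * of an accumulated statistic; xchg() returns the old value and installs the
 * new one in a single fully ordered step. stats_drain() is hypothetical.
 *
 *      static long stats_drain(atomic_long_t *nbytes)
 *      {
 *              // Take everything accumulated so far and zero the counter.
 *              return raw_atomic_long_xchg(nbytes, 0);
 *      }
 */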

/**
 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_cmpxchg(v, old, new);
#else
        return raw_atomic_cmpxchg(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_cmpxchg_acquire(v, old, new);
#else
        return raw_atomic_cmpxchg_acquire(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_cmpxchg_release(v, old, new);
#else
        return raw_atomic_cmpxchg_release(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_cmpxchg_relaxed(v, old, new);
#else
        return raw_atomic_cmpxchg_relaxed(v, old, new);
#endif
}
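
/*
 * Illustrative usage sketch, not part of the generated API: the classic
 * compare-and-swap retry loop, here tracking a running maximum. On a lost
 * race, cmpxchg() returns the value that beat us, so we retry against it.
 * record_max() is hypothetical.
 *
 *      static void record_max(atomic_long_t *max, long sample)
 *      {
 *              long cur = raw_atomic_long_read(max);
 *
 *              while (sample > cur) {
 *                      long old = raw_atomic_long_cmpxchg(max, cur, sample);
 *
 *                      if (old == cur)
 *                              break;          // we installed sample
 *                      cur = old;              // lost the race; retry
 *              }
 *      }
 */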

/**
 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
#else
        return raw_atomic_try_cmpxchg(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
#else
        return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
#else
        return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
#else
        return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
#endif
}
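
/*
 * Illustrative usage sketch, not part of the generated API: because
 * try_cmpxchg() refreshes @old on failure, the retry loop needs no explicit
 * re-read, which is why it is generally preferred over cmpxchg() loops.
 * counter_inc_below() is hypothetical.
 *
 *      static bool counter_inc_below(atomic_long_t *cnt, long limit)
 *      {
 *              long old = raw_atomic_long_read(cnt);
 *
 *              do {
 *                      if (old >= limit)
 *                              return false;
 *              } while (!raw_atomic_long_try_cmpxchg(cnt, &old, old + 1));
 *
 *              return true;
 *      }
 */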

/**
 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_sub_and_test(i, v);
#else
        return raw_atomic_sub_and_test(i, v);
#endif
}

/**
 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_dec_and_test(v);
#else
        return raw_atomic_dec_and_test(v);
#endif
}
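
/*
 * Illustrative usage sketch, not part of the generated API: a refcount-style
 * put where the last dropper frees the object. The full ordering of
 * dec_and_test() makes earlier accesses to *obj happen before the free.
 * struct my_obj and obj_put() are hypothetical; kfree() is the usual kernel
 * allocator free.
 *
 *      struct my_obj {
 *              atomic_long_t refs;
 *              // ... payload ...
 *      };
 *
 *      static void obj_put(struct my_obj *obj)
 *      {
 *              if (raw_atomic_long_dec_and_test(&obj->refs))
 *                      kfree(obj);
 *      }
 */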

/**
 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_inc_and_test(v);
#else
        return raw_atomic_inc_and_test(v);
#endif
}

/**
 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_add_negative(i, v);
#else
        return raw_atomic_add_negative(i, v);
#endif
}

/**
 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_acquire() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_add_negative_acquire(i, v);
#else
        return raw_atomic_add_negative_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_release() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_add_negative_release(i, v);
#else
        return raw_atomic_add_negative_release(i, v);
#endif
}

/**
 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_relaxed() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_add_negative_relaxed(i, v);
#else
        return raw_atomic_add_negative_relaxed(i, v);
#endif
}
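
/*
 * Illustrative usage sketch, not part of the generated API: charging against
 * a signed balance and detecting, in the same atomic step, that the account
 * went negative. charge() is hypothetical.
 *
 *      static bool charge(atomic_long_t *balance, long cost)
 *      {
 *              // True when the balance dips below zero after the charge.
 *              return raw_atomic_long_add_negative(-cost, balance);
 *      }
 */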

/**
 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_unless() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_fetch_add_unless(v, a, u);
#else
        return raw_atomic_fetch_add_unless(v, a, u);
#endif
}
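
/*
 * Illustrative usage sketch, not part of the generated API: the classic
 * "get unless zero" pattern -- take a reference only while at least one is
 * already held, so a dying object is never resurrected. obj_tryget() is
 * hypothetical.
 *
 *      static bool obj_tryget(atomic_long_t *refs)
 *      {
 *              // Original value 0 means no update happened: the get failed.
 *              return raw_atomic_long_fetch_add_unless(refs, 1, 0) != 0;
 *      }
 */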

/**
 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_unless() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
        return raw_atomic64_add_unless(v, a, u);
#else
        return raw_atomic_add_unless(v, a, u);
#endif
}
                                                   >> 629 arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
                                                   >> 630 {
                                                   >> 631         return arch_atomic_fetch_sub(i, v);
                                                   >> 632 }
                                                   >> 633 
                                                   >> 634 static __always_inline long
                                                   >> 635 arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
                                                   >> 636 {
                                                   >> 637         return arch_atomic_fetch_sub_acquire(i, v);
                                                   >> 638 }
                                                   >> 639 
                                                   >> 640 static __always_inline long
                                                   >> 641 arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
                                                   >> 642 {
                                                   >> 643         return arch_atomic_fetch_sub_release(i, v);
                                                   >> 644 }
                                                   >> 645 
                                                   >> 646 static __always_inline long
                                                   >> 647 arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
                                                   >> 648 {
                                                   >> 649         return arch_atomic_fetch_sub_relaxed(i, v);
                                                   >> 650 }
                                                   >> 651 
                                                   >> 652 static __always_inline void
                                                   >> 653 arch_atomic_long_inc(atomic_long_t *v)
                                                   >> 654 {
                                                   >> 655         arch_atomic_inc(v);
                                                   >> 656 }
                                                   >> 657 
                                                   >> 658 static __always_inline long
                                                   >> 659 arch_atomic_long_inc_return(atomic_long_t *v)
                                                   >> 660 {
                                                   >> 661         return arch_atomic_inc_return(v);
                                                   >> 662 }
                                                   >> 663 
                                                   >> 664 static __always_inline long
                                                   >> 665 arch_atomic_long_inc_return_acquire(atomic_long_t *v)
                                                   >> 666 {
                                                   >> 667         return arch_atomic_inc_return_acquire(v);
                                                   >> 668 }
                                                   >> 669 
                                                   >> 670 static __always_inline long
                                                   >> 671 arch_atomic_long_inc_return_release(atomic_long_t *v)
                                                   >> 672 {
                                                   >> 673         return arch_atomic_inc_return_release(v);
                                                   >> 674 }
                                                   >> 675 
                                                   >> 676 static __always_inline long
                                                   >> 677 arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
                                                   >> 678 {
                                                   >> 679         return arch_atomic_inc_return_relaxed(v);
                                                   >> 680 }
                                                   >> 681 
                                                   >> 682 static __always_inline long
                                                   >> 683 arch_atomic_long_fetch_inc(atomic_long_t *v)
                                                   >> 684 {
                                                   >> 685         return arch_atomic_fetch_inc(v);
                                                   >> 686 }
                                                   >> 687 
                                                   >> 688 static __always_inline long
                                                   >> 689 arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
                                                   >> 690 {
                                                   >> 691         return arch_atomic_fetch_inc_acquire(v);
                                                   >> 692 }
                                                   >> 693 
                                                   >> 694 static __always_inline long
                                                   >> 695 arch_atomic_long_fetch_inc_release(atomic_long_t *v)
                                                   >> 696 {
                                                   >> 697         return arch_atomic_fetch_inc_release(v);
                                                   >> 698 }
                                                   >> 699 
                                                   >> 700 static __always_inline long
                                                   >> 701 arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
                                                   >> 702 {
                                                   >> 703         return arch_atomic_fetch_inc_relaxed(v);
                                                   >> 704 }
                                                   >> 705 
                                                   >> 706 static __always_inline void
                                                   >> 707 arch_atomic_long_dec(atomic_long_t *v)
                                                   >> 708 {
                                                   >> 709         arch_atomic_dec(v);
                                                   >> 710 }
                                                   >> 711 
                                                   >> 712 static __always_inline long
                                                   >> 713 arch_atomic_long_dec_return(atomic_long_t *v)
                                                   >> 714 {
                                                   >> 715         return arch_atomic_dec_return(v);
                                                   >> 716 }
                                                   >> 717 
                                                   >> 718 static __always_inline long
                                                   >> 719 arch_atomic_long_dec_return_acquire(atomic_long_t *v)
                                                   >> 720 {
                                                   >> 721         return arch_atomic_dec_return_acquire(v);
                                                   >> 722 }
                                                   >> 723 
                                                   >> 724 static __always_inline long
                                                   >> 725 arch_atomic_long_dec_return_release(atomic_long_t *v)
                                                   >> 726 {
                                                   >> 727         return arch_atomic_dec_return_release(v);
                                                   >> 728 }
                                                   >> 729 
                                                   >> 730 static __always_inline long
                                                   >> 731 arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
                                                   >> 732 {
                                                   >> 733         return arch_atomic_dec_return_relaxed(v);
                                                   >> 734 }
                                                   >> 735 
                                                   >> 736 static __always_inline long
                                                   >> 737 arch_atomic_long_fetch_dec(atomic_long_t *v)
                                                   >> 738 {
                                                   >> 739         return arch_atomic_fetch_dec(v);
                                                   >> 740 }
                                                   >> 741 
                                                   >> 742 static __always_inline long
                                                   >> 743 arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
                                                   >> 744 {
                                                   >> 745         return arch_atomic_fetch_dec_acquire(v);
                                                   >> 746 }
                                                   >> 747 
                                                   >> 748 static __always_inline long
                                                   >> 749 arch_atomic_long_fetch_dec_release(atomic_long_t *v)
                                                   >> 750 {
                                                   >> 751         return arch_atomic_fetch_dec_release(v);
                                                   >> 752 }
                                                   >> 753 
                                                   >> 754 static __always_inline long
                                                   >> 755 arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
                                                   >> 756 {
                                                   >> 757         return arch_atomic_fetch_dec_relaxed(v);
                                                   >> 758 }
                                                   >> 759 
                                                   >> 760 static __always_inline void
                                                   >> 761 arch_atomic_long_and(long i, atomic_long_t *v)
                                                   >> 762 {
                                                   >> 763         arch_atomic_and(i, v);
                                                   >> 764 }
                                                   >> 765 
                                                   >> 766 static __always_inline long
                                                   >> 767 arch_atomic_long_fetch_and(long i, atomic_long_t *v)
                                                   >> 768 {
                                                   >> 769         return arch_atomic_fetch_and(i, v);
                                                   >> 770 }
                                                   >> 771 
                                                   >> 772 static __always_inline long
                                                   >> 773 arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
                                                   >> 774 {
                                                   >> 775         return arch_atomic_fetch_and_acquire(i, v);
                                                   >> 776 }
                                                   >> 777 
                                                   >> 778 static __always_inline long
                                                   >> 779 arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
                                                   >> 780 {
                                                   >> 781         return arch_atomic_fetch_and_release(i, v);
                                                   >> 782 }
                                                   >> 783 
                                                   >> 784 static __always_inline long
                                                   >> 785 arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
                                                   >> 786 {
                                                   >> 787         return arch_atomic_fetch_and_relaxed(i, v);
                                                   >> 788 }
                                                   >> 789 
                                                   >> 790 static __always_inline void
                                                   >> 791 arch_atomic_long_andnot(long i, atomic_long_t *v)
                                                   >> 792 {
                                                   >> 793         arch_atomic_andnot(i, v);
                                                   >> 794 }
                                                   >> 795 
                                                   >> 796 static __always_inline long
                                                   >> 797 arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
                                                   >> 798 {
                                                   >> 799         return arch_atomic_fetch_andnot(i, v);
                                                   >> 800 }
                                                   >> 801 
                                                   >> 802 static __always_inline long
                                                   >> 803 arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
                                                   >> 804 {
                                                   >> 805         return arch_atomic_fetch_andnot_acquire(i, v);
                                                   >> 806 }
                                                   >> 807 
                                                   >> 808 static __always_inline long
                                                   >> 809 arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
                                                   >> 810 {
                                                   >> 811         return arch_atomic_fetch_andnot_release(i, v);
                                                   >> 812 }
                                                   >> 813 
                                                   >> 814 static __always_inline long
                                                   >> 815 arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
                                                   >> 816 {
                                                   >> 817         return arch_atomic_fetch_andnot_relaxed(i, v);
                                                   >> 818 }
                                                   >> 819 
                                                   >> 820 static __always_inline void
                                                   >> 821 arch_atomic_long_or(long i, atomic_long_t *v)
                                                   >> 822 {
                                                   >> 823         arch_atomic_or(i, v);
                                                   >> 824 }
                                                   >> 825 
                                                   >> 826 static __always_inline long
                                                   >> 827 arch_atomic_long_fetch_or(long i, atomic_long_t *v)
                                                   >> 828 {
                                                   >> 829         return arch_atomic_fetch_or(i, v);
                                                   >> 830 }
                                                   >> 831 
                                                   >> 832 static __always_inline long
                                                   >> 833 arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
                                                   >> 834 {
                                                   >> 835         return arch_atomic_fetch_or_acquire(i, v);
                                                   >> 836 }
                                                   >> 837 
                                                   >> 838 static __always_inline long
                                                   >> 839 arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
                                                   >> 840 {
                                                   >> 841         return arch_atomic_fetch_or_release(i, v);
                                                   >> 842 }
                                                   >> 843 
                                                   >> 844 static __always_inline long
                                                   >> 845 arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
                                                   >> 846 {
                                                   >> 847         return arch_atomic_fetch_or_relaxed(i, v);
                                                   >> 848 }
                                                   >> 849 
                                                   >> 850 static __always_inline void
                                                   >> 851 arch_atomic_long_xor(long i, atomic_long_t *v)
                                                   >> 852 {
                                                   >> 853         arch_atomic_xor(i, v);
                                                   >> 854 }
                                                   >> 855 
                                                   >> 856 static __always_inline long
                                                   >> 857 arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
                                                   >> 858 {
                                                   >> 859         return arch_atomic_fetch_xor(i, v);
                                                   >> 860 }
                                                   >> 861 
                                                   >> 862 static __always_inline long
                                                   >> 863 arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
                                                   >> 864 {
                                                   >> 865         return arch_atomic_fetch_xor_acquire(i, v);
                                                   >> 866 }
                                                   >> 867 
                                                   >> 868 static __always_inline long
                                                   >> 869 arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
                                                   >> 870 {
                                                   >> 871         return arch_atomic_fetch_xor_release(i, v);
                                                   >> 872 }
                                                   >> 873 
                                                   >> 874 static __always_inline long
                                                   >> 875 arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
                                                   >> 876 {
                                                   >> 877         return arch_atomic_fetch_xor_relaxed(i, v);
                                                   >> 878 }
                                                   >> 879 
                                                   >> 880 static __always_inline long
                                                   >> 881 arch_atomic_long_xchg(atomic_long_t *v, long i)
                                                   >> 882 {
                                                   >> 883         return arch_atomic_xchg(v, i);
                                                   >> 884 }
                                                   >> 885 
                                                   >> 886 static __always_inline long
                                                   >> 887 arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
                                                   >> 888 {
                                                   >> 889         return arch_atomic_xchg_acquire(v, i);
                                                   >> 890 }
                                                   >> 891 
                                                   >> 892 static __always_inline long
                                                   >> 893 arch_atomic_long_xchg_release(atomic_long_t *v, long i)
                                                   >> 894 {
                                                   >> 895         return arch_atomic_xchg_release(v, i);
                                                   >> 896 }
                                                   >> 897 
                                                   >> 898 static __always_inline long
                                                   >> 899 arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
                                                   >> 900 {
                                                   >> 901         return arch_atomic_xchg_relaxed(v, i);
                                                   >> 902 }
                                                   >> 903 
                                                   >> 904 static __always_inline long
                                                   >> 905 arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
                                                   >> 906 {
                                                   >> 907         return arch_atomic_cmpxchg(v, old, new);
                                                   >> 908 }
                                                   >> 909 
                                                   >> 910 static __always_inline long
                                                   >> 911 arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
                                                   >> 912 {
                                                   >> 913         return arch_atomic_cmpxchg_acquire(v, old, new);
                                                   >> 914 }
                                                   >> 915 
                                                   >> 916 static __always_inline long
                                                   >> 917 arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
                                                   >> 918 {
                                                   >> 919         return arch_atomic_cmpxchg_release(v, old, new);
                                                   >> 920 }
                                                   >> 921 
                                                   >> 922 static __always_inline long
                                                   >> 923 arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
                                                   >> 924 {
                                                   >> 925         return arch_atomic_cmpxchg_relaxed(v, old, new);
                                                   >> 926 }
                                                   >> 927 
1717 static __always_inline bool                      928 static __always_inline bool
1718 raw_atomic_long_add_unless(atomic_long_t *v, long a, long u) !! 929 arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
1719 {                                                930 {
1720 #ifdef CONFIG_64BIT                           !! 931         return arch_atomic_try_cmpxchg(v, (int *)old, new);
1721         return raw_atomic64_add_unless(v, a, u); !! 932 }
1722 #else                                         !! 933 
1723         return raw_atomic_add_unless(v, a, u); << 
1724 #endif                                        << 
1725 }                                             << 
1726                                               << 
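/*
 * Editorial sketch, not part of the generated header: callers outside
 * noinstr code are expected to use the instrumented atomic_long_add_unless()
 * rather than the raw_ variant above.  A minimal, hypothetical use -- bump a
 * usage count only while it has not saturated:
 *
 *	static bool get_use(atomic_long_t *uses)
 *	{
 *		return atomic_long_add_unless(uses, 1, LONG_MAX);
 *	}
 */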
1727 /**                                           << 
1728  * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering << 
1729  * @v: pointer to atomic_long_t               << 
1730  *                                            << 
1731  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering. << 
1732  * Otherwise, @v is not modified and relaxed ordering is provided. << 
1733  *                                            << 
1734  * Safe to use in noinstr code; prefer atomic_long_inc_not_zero() elsewhere. << 
1735  *                                            << 
1736  * Return: @true if @v was updated, @false otherwise. << 
1737  */                                           << 
1738 static __always_inline bool                      934 static __always_inline bool
1739 raw_atomic_long_inc_not_zero(atomic_long_t *v) !! 935 arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
1740 {                                                936 {
1741 #ifdef CONFIG_64BIT                           !! 937         return arch_atomic_try_cmpxchg_acquire(v, (int *)old, new);
1742         return raw_atomic64_inc_not_zero(v);  !! 938 }
1743 #else                                         !! 939 
1744         return raw_atomic_inc_not_zero(v);    << 
1745 #endif                                        << 
1746 }                                             << 
1747                                               << 
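/*
 * Editorial sketch, not part of the generated header: inc_not_zero is the
 * classic "try to take a reference to an object that may already be dying"
 * primitive.  Hypothetical names, via the instrumented wrapper:
 *
 *	struct obj { atomic_long_t refs; };
 *
 *	static struct obj *obj_tryget(struct obj *o)
 *	{
 *		return atomic_long_inc_not_zero(&o->refs) ? o : NULL;
 *	}
 */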
1748 /**                                           << 
1749  * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering << 
1750  * @v: pointer to atomic_long_t               << 
1751  *                                            << 
1752  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering. << 
1753  * Otherwise, @v is not modified and relaxed ordering is provided. << 
1754  *                                            << 
1755  * Safe to use in noinstr code; prefer atomic_long_inc_unless_negative() elsewhere. << 
1756  *                                            << 
1757  * Return: @true if @v was updated, @false otherwise. << 
1758  */                                           << 
1759 static __always_inline bool                      940 static __always_inline bool
1760 raw_atomic_long_inc_unless_negative(atomic_long_t *v) !! 941 arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
1761 {                                                942 {
1762 #ifdef CONFIG_64BIT                           !! 943         return arch_atomic_try_cmpxchg_release(v, (int *)old, new);
1763         return raw_atomic64_inc_unless_negative(v); !! 944 }
1764 #else                                         !! 945 
1765         return raw_atomic_inc_unless_negative(v); << 
1766 #endif                                        << 
1767 }                                             << 
1768                                               << 
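/*
 * Editorial sketch, not part of the generated header: inc_unless_negative
 * suits counters where a negative value acts as a "closed" flag.
 * Hypothetical:
 *
 *	static bool waiter_enter(atomic_long_t *nr_waiters)
 *	{
 *		return atomic_long_inc_unless_negative(nr_waiters);
 *	}
 */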
1769 /**                                           << 
1770  * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering << 
1771  * @v: pointer to atomic_long_t               << 
1772  *                                            << 
1773  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering. << 
1774  * Otherwise, @v is not modified and relaxed ordering is provided. << 
1775  *                                            << 
1776  * Safe to use in noinstr code; prefer atomic_long_dec_unless_positive() elsewhere. << 
1777  *                                            << 
1778  * Return: @true if @v was updated, @false otherwise. << 
1779  */                                           << 
1780 static __always_inline bool                      946 static __always_inline bool
1781 raw_atomic_long_dec_unless_positive(atomic_long_t *v) !! 947 arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
1782 {                                                948 {
1783 #ifdef CONFIG_64BIT                           !! 949         return arch_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
1784         return raw_atomic64_dec_unless_positive(v); !! 950 }
1785 #else                                         !! 951 
1786         return raw_atomic_dec_unless_positive(v); !! 952 static __always_inline bool
1787 #endif                                        !! 953 arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
1788 }                                             !! 954 {
1789                                               !! 955         return arch_atomic_sub_and_test(i, v);
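/*
 * Editorial sketch, not part of the generated header: dec_unless_positive is
 * the mirror image -- the decrement happens only while the value is <= 0,
 * e.g. for a counter deliberately biased into the negative range.
 * Hypothetical:
 *
 *	static bool bias_deepen(atomic_long_t *bias)
 *	{
 *		return atomic_long_dec_unless_positive(bias);
 *	}
 */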
1790 /**                                           !! 956 }
1791  * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering !! 957 
1792  * @v: pointer to atomic_long_t               !! 958 static __always_inline bool
1793  *                                            !! 959 arch_atomic_long_dec_and_test(atomic_long_t *v)
1794  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering. !! 960 {
1795  * Otherwise, @v is not modified and relaxed ordering is provided. !! 961         return arch_atomic_dec_and_test(v);
1796  *                                            !! 962 }
1797  * Safe to use in noinstr code; prefer atomic_long_dec_if_positive() elsewhere. !! 963 
1798  *                                            !! 964 static __always_inline bool
1799  * Return: The old value of (@v - 1), regardless of whether @v was updated. !! 965 arch_atomic_long_inc_and_test(atomic_long_t *v)
1800  */                                           !! 966 {
                                                   >> 967         return arch_atomic_inc_and_test(v);
                                                   >> 968 }
                                                   >> 969 
                                                   >> 970 static __always_inline bool
                                                   >> 971 arch_atomic_long_add_negative(long i, atomic_long_t *v)
                                                   >> 972 {
                                                   >> 973         return arch_atomic_add_negative(i, v);
                                                   >> 974 }
                                                   >> 975 
1801 static __always_inline long                      976 static __always_inline long
1802 raw_atomic_long_dec_if_positive(atomic_long_t *v) !! 977 arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
1803 {                                                978 {
1804 #ifdef CONFIG_64BIT                           !! 979         return arch_atomic_fetch_add_unless(v, a, u);
1805         return raw_atomic64_dec_if_positive(v); !! 980 }
1806 #else                                         !! 981 
1807         return raw_atomic_dec_if_positive(v); !! 982 static __always_inline bool
1808 #endif                                        !! 983 arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
                                                   >> 984 {
                                                   >> 985         return arch_atomic_add_unless(v, a, u);
                                                   >> 986 }
                                                   >> 987 
                                                   >> 988 static __always_inline bool
                                                   >> 989 arch_atomic_long_inc_not_zero(atomic_long_t *v)
                                                   >> 990 {
                                                   >> 991         return arch_atomic_inc_not_zero(v);
                                                   >> 992 }
                                                   >> 993 
                                                   >> 994 static __always_inline bool
                                                   >> 995 arch_atomic_long_inc_unless_negative(atomic_long_t *v)
                                                   >> 996 {
                                                   >> 997         return arch_atomic_inc_unless_negative(v);
                                                   >> 998 }
                                                   >> 999 
                                                   >> 1000 static __always_inline bool
                                                   >> 1001 arch_atomic_long_dec_unless_positive(atomic_long_t *v)
                                                   >> 1002 {
                                                   >> 1003         return arch_atomic_dec_unless_positive(v);
                                                   >> 1004 }
                                                   >> 1005 
                                                   >> 1006 static __always_inline long
                                                   >> 1007 arch_atomic_long_dec_if_positive(atomic_long_t *v)
                                                   >> 1008 {
                                                   >> 1009         return arch_atomic_dec_if_positive(v);
1809 }                                                1010 }
1810                                                  1011 
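/*
 * Editorial sketch, not part of the generated header: dec_if_positive acts
 * like a semaphore trydown.  Note the return value is (old - 1) whether or
 * not the store happened, so >= 0 means a slot was actually taken.
 * Hypothetical:
 *
 *	static bool slot_tryget(atomic_long_t *free_slots)
 *	{
 *		return atomic_long_dec_if_positive(free_slots) >= 0;
 *	}
 */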
                                                   >> 1012 #endif /* CONFIG_64BIT */
1811 #endif /* _LINUX_ATOMIC_LONG_H */                1013 #endif /* _LINUX_ATOMIC_LONG_H */
1812 // eadf183c3600b8b92b91839dd3be6bcc560c752d   !! 1014 // e8f0e08ff072b74d180eabe2ad001282b38c2c88
1813                                                  1015 
