// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
#endif

/**
 * raw_atomic_long_read() - atomic load with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
raw_atomic_long_read(const atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_read(v);
#else
	return raw_atomic_read(v);
#endif
}
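
/*
 * Illustrative sketch (not part of the generated API): declaring an
 * atomic_long_t and reading it with the helpers above. The name
 * nr_events is hypothetical.
 *
 *	static atomic_long_t nr_events = ATOMIC_LONG_INIT(0);
 *
 *	static long nr_events_peek(void)
 *	{
 *		return raw_atomic_long_read(&nr_events);
 *	}
 */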

/**
 * raw_atomic_long_read_acquire() - atomic load with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
raw_atomic_long_read_acquire(const atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_read_acquire(v);
#else
	return raw_atomic_read_acquire(v);
#endif
}

/**
 * raw_atomic_long_set() - atomic set with relaxed ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_set(atomic_long_t *v, long i)
{
#ifdef CONFIG_64BIT
	raw_atomic64_set(v, i);
#else
	raw_atomic_set(v, i);
#endif
}

/**
 * raw_atomic_long_set_release() - atomic set with release ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_set_release(atomic_long_t *v, long i)
{
#ifdef CONFIG_64BIT
	raw_atomic64_set_release(v, i);
#else
	raw_atomic_set_release(v, i);
#endif
}
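
/*
 * Illustrative sketch (not part of the generated API): a release store
 * paired with an acquire load publishes data across CPUs; the names
 * data and ready are hypothetical.
 *
 *	static long data;
 *	static atomic_long_t ready = ATOMIC_LONG_INIT(0);
 *
 *	static void publish(long val)
 *	{
 *		data = val;
 *		raw_atomic_long_set_release(&ready, 1);	// data ordered before ready
 *	}
 *
 *	static bool consume(long *out)
 *	{
 *		if (!raw_atomic_long_read_acquire(&ready))
 *			return false;
 *		*out = data;	// acquire makes the write to data visible
 *		return true;
 *	}
 */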

/**
 * raw_atomic_long_add() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_add(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_add(i, v);
#else
	raw_atomic_add(i, v);
#endif
}

/**
 * raw_atomic_long_add_return() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return(i, v);
#else
	return raw_atomic_add_return(i, v);
#endif
}

/**
 * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return_acquire(i, v);
#else
	return raw_atomic_add_return_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_add_return_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return_release(i, v);
#else
	return raw_atomic_add_return_release(i, v);
#endif
}

/**
 * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return_relaxed(i, v);
#else
	return raw_atomic_add_return_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add(i, v);
#else
	return raw_atomic_fetch_add(i, v);
#endif
}
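
/*
 * Illustrative note (not part of the generated API): add_return yields
 * the updated value while fetch_add yields the original one, so for the
 * same @i and @v:
 *
 *	old = raw_atomic_long_fetch_add(i, &v);		// v's prior value
 *	new = raw_atomic_long_add_return(i, &v);	// v's new value
 *	// at this point: new == old + 2 * i
 */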

/**
 * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_acquire(i, v);
#else
	return raw_atomic_fetch_add_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_release(i, v);
#else
	return raw_atomic_fetch_add_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_relaxed(i, v);
#else
	return raw_atomic_fetch_add_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_sub() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_sub(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_sub(i, v);
#else
	raw_atomic_sub(i, v);
#endif
}

/**
 * raw_atomic_long_sub_return() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return(i, v);
#else
	return raw_atomic_sub_return(i, v);
#endif
}

/**
 * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return_acquire(i, v);
#else
	return raw_atomic_sub_return_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_sub_return_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return_release(i, v);
#else
	return raw_atomic_sub_return_release(i, v);
#endif
}

/**
 * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return_relaxed(i, v);
#else
	return raw_atomic_sub_return_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub(i, v);
#else
	return raw_atomic_fetch_sub(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub_acquire(i, v);
#else
	return raw_atomic_fetch_sub_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub_release(i, v);
#else
	return raw_atomic_fetch_sub_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub_relaxed(i, v);
#else
	return raw_atomic_fetch_sub_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_inc() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_inc(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_inc(v);
#else
	raw_atomic_inc(v);
#endif
}

/**
 * raw_atomic_long_inc_return() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return(v);
#else
	return raw_atomic_inc_return(v);
#endif
}

/**
 * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return_acquire(v);
#else
	return raw_atomic_inc_return_acquire(v);
#endif
}

/**
 * raw_atomic_long_inc_return_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return_release(v);
#else
	return raw_atomic_inc_return_release(v);
#endif
}

/**
 * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return_relaxed(v);
#else
	return raw_atomic_inc_return_relaxed(v);
#endif
}

/**
 * raw_atomic_long_fetch_inc() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc(v);
#else
	return raw_atomic_fetch_inc(v);
#endif
}
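
/*
 * Illustrative sketch (not part of the generated API): inc_return hands
 * each caller a unique, monotonically increasing value, which makes a
 * trivial ID allocator. The name next_id is hypothetical.
 *
 *	static atomic_long_t next_id = ATOMIC_LONG_INIT(0);
 *
 *	static long alloc_id(void)
 *	{
 *		return raw_atomic_long_inc_return(&next_id);
 *	}
 */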

/**
 * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc_acquire(v);
#else
	return raw_atomic_fetch_inc_acquire(v);
#endif
}

/**
 * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc_release(v);
#else
	return raw_atomic_fetch_inc_release(v);
#endif
}

/**
 * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc_relaxed(v);
#else
	return raw_atomic_fetch_inc_relaxed(v);
#endif
}

/**
 * raw_atomic_long_dec() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_dec(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_dec(v);
#else
	raw_atomic_dec(v);
#endif
}

/**
 * raw_atomic_long_dec_return() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return(v);
#else
	return raw_atomic_dec_return(v);
#endif
}

/**
 * raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return_acquire(v);
#else
	return raw_atomic_dec_return_acquire(v);
#endif
}

/**
 * raw_atomic_long_dec_return_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return_release(v);
#else
	return raw_atomic_dec_return_release(v);
#endif
}

/**
 * raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return_relaxed(v);
#else
	return raw_atomic_dec_return_relaxed(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec(v);
#else
	return raw_atomic_fetch_dec(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec_acquire(v);
#else
	return raw_atomic_fetch_dec_acquire(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec_release(v);
#else
	return raw_atomic_fetch_dec_release(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec_relaxed(v);
#else
	return raw_atomic_fetch_dec_relaxed(v);
#endif
}
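
/*
 * Illustrative sketch (not part of the generated API): a minimal
 * reference count built on the increment/decrement helpers above; real
 * kernel code should normally use refcount_t instead. The struct and
 * names are hypothetical.
 *
 *	struct obj {
 *		atomic_long_t refs;	// initialized to 1 at creation
 *	};
 *
 *	static void obj_get(struct obj *o)
 *	{
 *		raw_atomic_long_inc(&o->refs);
 *	}
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (raw_atomic_long_dec_return(&o->refs) == 0)
 *			kfree(o);	// dec_return is fully ordered
 *	}
 */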

/**
 * raw_atomic_long_and() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_and() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_and(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_and(i, v);
#else
	raw_atomic_and(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and(i, v);
#else
	return raw_atomic_fetch_and(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and_acquire(i, v);
#else
	return raw_atomic_fetch_and_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and_release(i, v);
#else
	return raw_atomic_fetch_and_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and_relaxed(i, v);
#else
	return raw_atomic_fetch_and_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_andnot(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_andnot(i, v);
#else
	raw_atomic_andnot(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot(i, v);
#else
	return raw_atomic_fetch_andnot(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_acquire(i, v);
#else
	return raw_atomic_fetch_andnot_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_release(i, v);
#else
	return raw_atomic_fetch_andnot_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_relaxed(i, v);
#else
	return raw_atomic_fetch_andnot_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_or(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_or(i, v);
#else
	raw_atomic_or(i, v);
#endif
}
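
/*
 * Illustrative sketch (not part of the generated API): treating an
 * atomic_long_t as a flag mask, setting bits with OR and clearing them
 * with AND NOT. FLAG_DIRTY and flags are hypothetical.
 *
 *	#define FLAG_DIRTY	(1L << 0)
 *
 *	static atomic_long_t flags = ATOMIC_LONG_INIT(0);
 *
 *	static void mark_dirty(void)
 *	{
 *		raw_atomic_long_or(FLAG_DIRTY, &flags);
 *	}
 *
 *	static void clear_dirty(void)
 *	{
 *		raw_atomic_long_andnot(FLAG_DIRTY, &flags);
 *	}
 */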

/**
 * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_acquire(i, v);
#else
	return raw_atomic_fetch_or_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_release(i, v);
#else
	return raw_atomic_fetch_or_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_relaxed(i, v);
#else
	return raw_atomic_fetch_or_relaxed(i, v);
#endif
}
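
/*
 * Usage sketch (editor's illustration): the acquire form is a natural
 * building block for a test-and-set style lock bit, since the winner's
 * later loads cannot be reordered before the successful OR. A matching
 * unlock would clear the bit with release ordering, e.g. via
 * raw_atomic_long_fetch_andnot_release(). Names are hypothetical.
 *
 *	#define MY_LOCK_BIT		0x1UL
 *
 *	static bool my_trylock(atomic_long_t *state)
 *	{
 *		long old = raw_atomic_long_fetch_or_acquire(MY_LOCK_BIT, state);
 *
 *		return !(old & MY_LOCK_BIT);
 *	}
 */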

/**
 * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_xor(i, v);
#else
	raw_atomic_xor(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor(i, v);
#else
	return raw_atomic_fetch_xor(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_acquire(i, v);
#else
	return raw_atomic_fetch_xor_acquire(i, v);
#endif
}
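
/*
 * Usage sketch (editor's illustration): XOR atomically toggles bits;
 * the fetch form reports the pre-toggle value so the caller knows
 * which state it transitioned from. Names are hypothetical.
 *
 *	#define MY_FLAG_PHASE		0x4UL
 *
 *	static bool toggle_phase(atomic_long_t *flags)
 *	{
 *		long old = raw_atomic_long_fetch_xor(MY_FLAG_PHASE, flags);
 *
 *		return old & MY_FLAG_PHASE;
 *	}
 */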

/**
 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_release(i, v);
#else
	return raw_atomic_fetch_xor_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_relaxed(i, v);
#else
	return raw_atomic_fetch_xor_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg(v, new);
#else
	return raw_atomic_xchg(v, new);
#endif
}
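
/*
 * Usage sketch (editor's illustration): raw_atomic_long_xchg() is a
 * one-shot "take and reset" primitive, e.g. draining an accumulated
 * event count in a single atomic step. The name is hypothetical.
 *
 *	static long drain_events(atomic_long_t *nr_events)
 *	{
 *		return raw_atomic_long_xchg(nr_events, 0);
 *	}
 */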

/**
 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_acquire(v, new);
#else
	return raw_atomic_xchg_acquire(v, new);
#endif
}

/**
 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_release(v, new);
#else
	return raw_atomic_xchg_release(v, new);
#endif
}

/**
 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_relaxed(v, new);
#else
	return raw_atomic_xchg_relaxed(v, new);
#endif
}
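
/*
 * Usage sketch (editor's illustration): the release form orders all
 * stores that prepared a value before the value itself becomes
 * visible, a common shape when handing work to another CPU. Names are
 * hypothetical; the consumer would pair this with an acquire
 * operation such as raw_atomic_long_xchg_acquire().
 *
 *	static long publish_token(atomic_long_t *slot, long token)
 *	{
 *		return raw_atomic_long_xchg_release(slot, token);
 *	}
 */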

/**
 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg(v, old, new);
#else
	return raw_atomic_cmpxchg(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_acquire(v, old, new);
#else
	return raw_atomic_cmpxchg_acquire(v, old, new);
#endif
}
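
/*
 * Usage sketch (editor's illustration): the classic compare-and-swap
 * retry loop, here incrementing a counter only while it stays below a
 * ceiling. Names are hypothetical.
 *
 *	static bool add_below_limit(atomic_long_t *v, long limit)
 *	{
 *		long old = raw_atomic_long_read(v);
 *		long prev;
 *
 *		while (old < limit) {
 *			prev = raw_atomic_long_cmpxchg(v, old, old + 1);
 *			if (prev == old)
 *				return true;
 *			old = prev;
 *		}
 *
 *		return false;
 *	}
 */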

/**
 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_release(v, old, new);
#else
	return raw_atomic_cmpxchg_release(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_relaxed(v, old, new);
#else
	return raw_atomic_cmpxchg_relaxed(v, old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
#endif
}
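
/*
 * Usage sketch (editor's illustration): because try_cmpxchg() updates
 * *@old on failure, the compare-and-swap retry loop shown earlier can
 * be written more tightly, without re-reading @v by hand. Names are
 * hypothetical.
 *
 *	static bool add_below_limit(atomic_long_t *v, long limit)
 *	{
 *		long old = raw_atomic_long_read(v);
 *
 *		do {
 *			if (old >= limit)
 *				return false;
 *		} while (!raw_atomic_long_try_cmpxchg(v, &old, old + 1));
 *
 *		return true;
 *	}
 */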

/**
 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_and_test(i, v);
#else
	return raw_atomic_sub_and_test(i, v);
#endif
}

/**
 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_and_test(v);
#else
	return raw_atomic_dec_and_test(v);
#endif
}

/**
 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_and_test(v);
#else
	return raw_atomic_inc_and_test(v);
#endif
}
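
/*
 * Usage sketch (editor's illustration): dec_and_test() is the classic
 * last-reference check of a hand-rolled reference count; new code
 * would normally use refcount_t instead. Names are hypothetical.
 *
 *	struct my_obj {
 *		atomic_long_t refs;
 *	};
 *
 *	static void my_obj_put(struct my_obj *obj)
 *	{
 *		if (raw_atomic_long_dec_and_test(&obj->refs))
 *			my_obj_free(obj);
 *	}
 */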

/**
 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative(i, v);
#else
	return raw_atomic_add_negative(i, v);
#endif
}

/**
 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_acquire() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_acquire(i, v);
#else
	return raw_atomic_add_negative_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_release() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_release(i, v);
#else
	return raw_atomic_add_negative_release(i, v);
#endif
}
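
/*
 * Usage sketch (editor's illustration): add_negative() suits counters
 * where dropping below zero signals exhaustion, as in a simplified
 * semaphore-style fast path; on failure a real implementation would
 * fall back to a slow path that re-increments or queues. Names are
 * hypothetical.
 *
 *	static bool fastpath_acquire(atomic_long_t *count)
 *	{
 *		return !raw_atomic_long_add_negative(-1, count);
 *	}
 */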

/**
 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_relaxed() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_relaxed(i, v);
#else
	return raw_atomic_add_negative_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_unless() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_unless(v, a, u);
#else
	return raw_atomic_fetch_add_unless(v, a, u);
#endif
}
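
/*
 * Usage sketch (editor's illustration): fetch_add_unless() underpins
 * "add a reference unless the count is zero" schemes; comparing the
 * returned value with @u reveals whether the add took place. The name
 * is hypothetical.
 *
 *	static bool my_obj_get(atomic_long_t *refs)
 *	{
 *		return raw_atomic_long_fetch_add_unless(refs, 1, 0) != 0;
 *	}
 */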

/**
 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_unless() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_unless(v, a, u);
#else
	return raw_atomic_add_unless(v, a, u);
#endif
}

/**
 * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_not_zero() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_not_zero(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_not_zero(v);
#else
	return raw_atomic_inc_not_zero(v);
#endif
}

/**
 * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_unless_negative() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_unless_negative(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_unless_negative(v);
#else
	return raw_atomic_inc_unless_negative(v);
#endif
}
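
/*
 * Usage sketch (editor's illustration): inc_not_zero() is the usual
 * guard when taking a reference on an object found by a lockless
 * lookup, where the count may already have reached zero. Names are
 * hypothetical, reusing the struct my_obj sketch above.
 *
 *	static struct my_obj *my_obj_tryget(struct my_obj *obj)
 *	{
 *		if (!raw_atomic_long_inc_not_zero(&obj->refs))
 *			return NULL;
 *
 *		return obj;
 *	}
 */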

/**
 * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_unless_positive() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_unless_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_unless_positive(v);
#else
	return raw_atomic_dec_unless_positive(v);
#endif
}

/**
 * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_if_positive() elsewhere.
 *
 * Return: The old value of (@v - 1), regardless of whether @v was updated.
 */
static __always_inline long
raw_atomic_long_dec_if_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_if_positive(v);
#else
	return raw_atomic_dec_if_positive(v);
#endif
}

#endif /* _LINUX_ATOMIC_LONG_H */
// eadf183c3600b8b92b91839dd3be6bcc560c752d