/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
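
/*
 * Illustrative sketch (the variable names below are placeholders, not
 * part of this header): the instance of a per-cpu variable for CPU
 * "cpu" is reached by adding that CPU's offset to the variable's
 * per-cpu address, which is essentially what per_cpu_ptr() in
 * <linux/percpu-defs.h> does via SHIFT_PERCPU_PTR():
 *
 *	DEFINE_PER_CPU(int, example_counter);
 *
 *	int *p = SHIFT_PERCPU_PTR(&example_counter, per_cpu_offset(cpu));
 *	(same result as per_cpu_ptr(&example_counter, cpu))
 *
 * arch_raw_cpu_ptr() above does the same for the local CPU, using
 * __my_cpu_offset instead of an explicit CPU number.
 */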

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#define raw_cpu_generic_read(pcp)				\
({								\
	*raw_cpu_ptr(&(pcp));					\
})

#define raw_cpu_generic_to_op(pcp, val, op)			\
do {								\
	*raw_cpu_ptr(&(pcp)) op val;				\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)			\
({								\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));			\
								\
	*__p += val;						\
	*__p;							\
})

#define raw_cpu_generic_xchg(pcp, nval)				\
({								\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;					\
	__ret = *__p;						\
	*__p = nval;						\
	__ret;							\
})

#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)	\
({								\
	typeof(pcp) __val, __old = *(ovalp);			\
	__val = _cmpxchg(pcp, __old, nval);			\
	if (__val != __old)					\
		*(ovalp) = __val;				\
	__val == __old;						\
})

#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)		\
({								\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __val = *__p, ___old = *(ovalp);		\
	bool __ret;						\
	if (__val == ___old) {					\
		*__p = nval;					\
		__ret = true;					\
	} else {						\
		*(ovalp) = __val;				\
		__ret = false;					\
	}							\
	__ret;							\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)		\
({								\
	typeof(pcp) __old = (oval);				\
	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);		\
	__old;							\
})

#define __this_cpu_generic_read_nopreempt(pcp)			\
({								\
	typeof(pcp) ___ret;					\
	preempt_disable_notrace();				\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));		\
	preempt_enable_notrace();				\
	___ret;							\
})

#define __this_cpu_generic_read_noirq(pcp)			\
({								\
	typeof(pcp) ___ret;					\
	unsigned long ___flags;					\
	raw_local_irq_save(___flags);				\
	___ret = raw_cpu_generic_read(pcp);			\
	raw_local_irq_restore(___flags);			\
	___ret;							\
})

#define this_cpu_generic_read(pcp)				\
({								\
	typeof(pcp) __ret;					\
	if (__native_word(pcp))					\
		__ret = __this_cpu_generic_read_nopreempt(pcp);	\
	else							\
		__ret = __this_cpu_generic_read_noirq(pcp);	\
	__ret;							\
})

#define this_cpu_generic_to_op(pcp, val, op)			\
do {								\
	unsigned long __flags;					\
	raw_local_irq_save(__flags);				\
	raw_cpu_generic_to_op(pcp, val, op);			\
	raw_local_irq_restore(__flags);				\
} while (0)

#define this_cpu_generic_add_return(pcp, val)			\
({								\
	typeof(pcp) __ret;					\
	unsigned long __flags;					\
	raw_local_irq_save(__flags);				\
	__ret = raw_cpu_generic_add_return(pcp, val);		\
	raw_local_irq_restore(__flags);				\
	__ret;							\
})

#define this_cpu_generic_xchg(pcp, nval)			\
({								\
	typeof(pcp) __ret;					\
	unsigned long __flags;					\
	raw_local_irq_save(__flags);				\
	__ret = raw_cpu_generic_xchg(pcp, nval);		\
	raw_local_irq_restore(__flags);				\
	__ret;							\
})

#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)		\
({								\
	bool __ret;						\
	unsigned long __flags;					\
	raw_local_irq_save(__flags);				\
	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);	\
	raw_local_irq_restore(__flags);				\
	__ret;							\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)		\
({								\
	typeof(pcp) __ret;					\
	unsigned long __flags;					\
	raw_local_irq_save(__flags);				\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);	\
	raw_local_irq_restore(__flags);				\
	__ret;							\
})
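
/*
 * The size-specific accessors below (one each for 1, 2, 4 and 8 byte
 * operands) fall back to the generic helpers above, but only when the
 * architecture has not already provided its own definition before this
 * header is included.  A rough sketch of the fallback expansion:
 *
 *	raw_cpu_add_4(pcp, val)
 *		-> raw_cpu_generic_to_op(pcp, val, +=)
 *		-> *raw_cpu_ptr(&(pcp)) += val;
 *
 * An architecture with a cheaper way to do this (for instance a single
 * instruction addressing relative to a per-cpu base register) defines
 * raw_cpu_add_4 itself, and the #ifndef guard skips this fallback.
 */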

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_try_cmpxchg_1
#ifdef raw_cpu_cmpxchg_1
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval)			\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
#else
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval)			\
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_2
#ifdef raw_cpu_cmpxchg_2
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval)			\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
#else
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval)			\
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_4
#ifdef raw_cpu_cmpxchg_4
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval)			\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
#else
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval)			\
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_8
#ifdef raw_cpu_cmpxchg_8
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval)			\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
#else
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval)			\
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_try_cmpxchg64
#ifdef raw_cpu_cmpxchg64
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval)			\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
#else
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval)			\
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg128
#ifdef raw_cpu_cmpxchg128
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval)		\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
#else
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval)		\
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg64
#define raw_cpu_cmpxchg64(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg128
#define raw_cpu_cmpxchg128(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
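
/*
 * The this_cpu_*() fallbacks below mirror the raw_cpu_*() set above,
 * but the generic versions disable interrupts around the access so the
 * operation is safe against preemption and interrupts on the local
 * CPU; the raw_cpu_*() variants do no such protection and leave that
 * to the caller.  A minimal usage sketch (the variable and function
 * names are placeholders, not part of this header):
 *
 *	DEFINE_PER_CPU(unsigned long, example_nr_events);
 *
 *	void example_count_event(void)
 *	{
 *		this_cpu_inc(example_nr_events);
 *	}
 *
 * this_cpu_inc() from <linux/percpu-defs.h> resolves by operand size
 * to one of the this_cpu_add_N() operations defined below.
 */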

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_try_cmpxchg_1
#ifdef this_cpu_cmpxchg_1
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval)		\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
#else
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval)		\
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_2
#ifdef this_cpu_cmpxchg_2
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval)		\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
#else
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval)		\
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_4
#ifdef this_cpu_cmpxchg_4
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval)		\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
#else
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval)		\
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_8
#ifdef this_cpu_cmpxchg_8
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval)		\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
#else
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval)		\
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_try_cmpxchg64
#ifdef this_cpu_cmpxchg64
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval)		\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
#else
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval)		\
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg128
#ifdef this_cpu_cmpxchg128
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval)		\
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
#else
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval)		\
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg64
#define this_cpu_cmpxchg64(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg128
#define this_cpu_cmpxchg128(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
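
/*
 * Example use of the try_cmpxchg flavour defined above (a sketch with
 * placeholder names, not code from this file): update a per-cpu
 * maximum without an explicit irq-save section at the call site.
 *
 *	DEFINE_PER_CPU(u64, example_max);
 *
 *	void example_update_max(u64 val)
 *	{
 *		u64 old = this_cpu_read(example_max);
 *
 *		while (old < val) {
 *			if (this_cpu_try_cmpxchg(example_max, &old, val))
 *				break;
 *			// on failure, "old" was updated with the value
 *			// actually found, so the loop re-checks against it
 *		}
 *	}
 */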

#endif /* _ASM_GENERIC_PERCPU_H_ */