TOMOYO Linux Cross Reference
Linux/arch/x86/include/asm/cmpxchg_32.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H

/*
 * Note: if you use __cmpxchg64() or one of its variants,
 *       you need to test for the feature in boot_cpu_data.
 */

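/*
 * cmpxchg8b compares the 64-bit memory operand with EDX:EAX and, on a
 * match, stores ECX:EBX; otherwise it loads the current memory value
 * into EDX:EAX. This union lets a u64 be viewed as the two 32-bit
 * halves that map onto those register pairs.
 */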
union __u64_halves {
        u64 full;
        struct {
                u32 low, high;
        };
};

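/*
 * Common body of the 64-bit compare-and-exchange helpers: issue a
 * (possibly LOCK-prefixed) cmpxchg8b on *_ptr with _old in EDX:EAX and
 * _new in ECX:EBX, and return the value that was found in memory.
 */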
#define __arch_cmpxchg64(_ptr, _old, _new, _lock)                       \
({                                                                      \
        union __u64_halves o = { .full = (_old), },                     \
                           n = { .full = (_new), };                     \
                                                                        \
        asm volatile(_lock "cmpxchg8b %[ptr]"                           \
                     : [ptr] "+m" (*(_ptr)),                            \
                       "+a" (o.low), "+d" (o.high)                      \
                     : "b" (n.low), "c" (n.high)                        \
                     : "memory");                                       \
                                                                        \
        o.full;                                                         \
})

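/*
 * __cmpxchg64() is the SMP-safe form (LOCK-prefixed cmpxchg8b);
 * __cmpxchg64_local() omits the lock prefix and is only atomic with
 * respect to the local CPU.
 */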
static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
        return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);
}

static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
        return __arch_cmpxchg64(ptr, old, new,);
}

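/*
 * "try" flavour: the ZF result of cmpxchg8b is captured via
 * CC_SET()/CC_OUT(), and on failure the value observed in memory is
 * written back through _oldp so the caller can retry without an
 * extra load.
 */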
#define __arch_try_cmpxchg64(_ptr, _oldp, _new, _lock)                  \
({                                                                      \
        union __u64_halves o = { .full = *(_oldp), },                   \
                           n = { .full = (_new), };                     \
        bool ret;                                                       \
                                                                        \
        asm volatile(_lock "cmpxchg8b %[ptr]"                           \
                     CC_SET(e)                                          \
                     : CC_OUT(e) (ret),                                 \
                       [ptr] "+m" (*(_ptr)),                            \
                       "+a" (o.low), "+d" (o.high)                      \
                     : "b" (n.low), "c" (n.high)                        \
                     : "memory");                                       \
                                                                        \
        if (unlikely(!ret))                                             \
                *(_oldp) = o.full;                                      \
                                                                        \
        likely(ret);                                                    \
})

static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
        return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);
}

static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
        return __arch_try_cmpxchg64(ptr, oldp, new,);
}

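/*
 * With CONFIG_X86_CMPXCHG64 the kernel is only built for CPUs that
 * implement CMPXCHG8B, so the helpers above can be used directly.
 * Otherwise the instruction may have to be emulated on 80386/80486.
 */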
#ifdef CONFIG_X86_CMPXCHG64

#define arch_cmpxchg64 __cmpxchg64

#define arch_cmpxchg64_local __cmpxchg64_local

#define arch_try_cmpxchg64 __try_cmpxchg64

#define arch_try_cmpxchg64_local __try_cmpxchg64_local

#else

/*
 * Building a kernel capable of running on the 80386 and 80486: cmpxchg8b
 * may have to be emulated on those CPUs.
 */

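/*
 * Like __arch_cmpxchg64(), but wrapped in an ALTERNATIVE(): CPUs with
 * X86_FEATURE_CX8 get the (possibly locked) cmpxchg8b patched in, while
 * older CPUs fall back to the out-of-line cmpxchg8b_emu helper
 * (arch/x86/lib/cmpxchg8b_emu.S), which takes the pointer in %esi
 * (the "S" constraint) and the same EDX:EAX / ECX:EBX register pairs.
 */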
#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)        \
({                                                                      \
        union __u64_halves o = { .full = (_old), },                     \
                           n = { .full = (_new), };                     \
                                                                        \
        asm volatile(ALTERNATIVE(_lock_loc                              \
                                 "call cmpxchg8b_emu",                  \
                                 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
                     : "+a" (o.low), "+d" (o.high)                      \
                     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)      \
                     : "memory");                                       \
                                                                        \
        o.full;                                                         \
})

static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
        return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_cmpxchg64 arch_cmpxchg64

static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
        return __arch_cmpxchg64_emu(ptr, old, new, ,);
}
#define arch_cmpxchg64_local arch_cmpxchg64_local

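/*
 * "try" counterpart of __arch_cmpxchg64_emu(): same CX8/emulation
 * dispatch, but returning success as a bool and writing the observed
 * value back through _oldp on failure.
 */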
#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock)   \
({                                                                      \
        union __u64_halves o = { .full = *(_oldp), },                   \
                           n = { .full = (_new), };                     \
        bool ret;                                                       \
                                                                        \
        asm volatile(ALTERNATIVE(_lock_loc                              \
                                 "call cmpxchg8b_emu",                  \
                                 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
                     CC_SET(e)                                          \
                     : CC_OUT(e) (ret),                                 \
                       "+a" (o.low), "+d" (o.high)                      \
                     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)      \
                     : "memory");                                       \
                                                                        \
        if (unlikely(!ret))                                             \
                *(_oldp) = o.full;                                      \
                                                                        \
        likely(ret);                                                    \
})

static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
        return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_try_cmpxchg64 arch_try_cmpxchg64

static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
        return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
}
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local

#endif

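/*
 * system_has_cmpxchg64() reports whether the boot CPU implements
 * CMPXCHG8B; per the note at the top of this file, callers of the raw
 * __cmpxchg64() helpers must check it before relying on them.
 */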
#define system_has_cmpxchg64()          boot_cpu_has(X86_FEATURE_CX8)
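
/*
 * Illustrative sketch only (not part of this header), assuming the
 * generic try_cmpxchg64() wrapper that the arch_ helpers above back;
 * 'p' and the increment are hypothetical:
 *
 *	u64 old = READ_ONCE(*p), new;
 *
 *	do {
 *		new = old + 1;
 *	} while (!try_cmpxchg64(p, &old, new));
 */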

#endif /* _ASM_X86_CMPXCHG_32_H */
