TOMOYO Linux Cross Reference
Linux/arch/mips/include/asm/cmpxchg.h


/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/asm.h>
#include <asm/compiler.h>
#include <asm/sync.h>

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
        __compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for xchg");
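/*
 * Illustrative example (hypothetical code, not part of this header): on a
 * 32-bit kernel,
 *
 *	u64 v = 0;
 *	xchg(&v, 1ULL);
 *
 * reaches the "case 8" branch of __arch_xchg() below, which resolves to
 * __xchg_called_with_bad_pointer() and therefore fails at compile or link
 * time instead of silently doing a non-atomic 64-bit exchange.
 */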

#define __xchg_asm(ld, st, m, val)                                      \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " __SYNC(full, loongson3_war) "         \n"     \
                "1:     " ld "  %0, %2          # __xchg_asm    \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z3                         \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __stringify(SC_BEQZ)       "       $1, 1b  \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)                  \
                : __LLSC_CLOBBER);                                      \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                *m = val;                                               \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

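/*
 * 1- and 2-byte exchanges are handled out of line; the helper below emulates
 * them with a 32-bit LL/SC on the naturally aligned word containing the byte
 * or halfword (see arch/mips/kernel/cmpxchg.c).
 */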
extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
                                  unsigned int size);

static __always_inline
unsigned long __arch_xchg(volatile void *ptr, unsigned long x, int size)
{
        switch (size) {
        case 1:
        case 2:
                return __xchg_small(ptr, x, size);

        case 4:
                return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

        case 8:
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __xchg_called_with_bad_pointer();

                return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

        default:
                return __xchg_called_with_bad_pointer();
        }
}

#define arch_xchg(ptr, x)                                               \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * In the Loongson3 workaround case __xchg_asm() already        \
         * contains a completion barrier prior to the LL, so we don't   \
         * need to emit an extra one here.                              \
         */                                                             \
        if (__SYNC_loongson3_war == 0)                                  \
                smp_mb__before_llsc();                                  \
                                                                        \
        __res = (__typeof__(*(ptr)))                                    \
                __arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
                                                                        \
        smp_llsc_mb();                                                  \
                                                                        \
        __res;                                                          \
})
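/*
 * Illustrative use (hypothetical names, sketch only; generic kernel code
 * should normally go through xchg() or the atomic_* API rather than calling
 * arch_xchg() directly):
 *
 *	static unsigned long pending_flags;
 *
 *	unsigned long old = arch_xchg(&pending_flags, 0UL);
 *
 * The previous value is returned, and the update is atomic and fully ordered
 * with respect to other CPUs thanks to the barriers above.
 */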

#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "       " __SYNC(full, loongson3_war) "         \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __stringify(SC_BEQZ)       "       $1, 1b  \n"     \
                "       .set    pop                             \n"     \
                "2:     " __SYNC(full, loongson3_war) "         \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)      \
                : __LLSC_CLOBBER);                                      \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})
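/*
 * Both branches above implement the same semantics: the value previously
 * held at *m is returned, and *m is overwritten with 'new' only if it was
 * equal to 'old'. The LL/SC branch does this atomically with respect to
 * other CPUs; the interrupt-disabling fallback is only used on CPUs without
 * LL/SC and is not SMP-safe on its own.
 */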

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
                                     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
                        unsigned long new, unsigned int size)
{
        switch (size) {
        case 1:
        case 2:
                return __cmpxchg_small(ptr, old, new, size);

        case 4:
                return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
                                     (u32)old, new);

        case 8:
                /* lld/scd are only available for MIPS64 */
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __cmpxchg_called_with_bad_pointer();

                return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
                                     (u64)old, new);

        default:
                return __cmpxchg_called_with_bad_pointer();
        }
}

#define arch_cmpxchg_local(ptr, old, new)                               \
        ((__typeof__(*(ptr)))                                           \
                __cmpxchg((ptr),                                        \
                          (unsigned long)(__typeof__(*(ptr)))(old),     \
                          (unsigned long)(__typeof__(*(ptr)))(new),     \
                          sizeof(*(ptr))))

#define arch_cmpxchg(ptr, old, new)                                     \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * In the Loongson3 workaround case __cmpxchg_asm() already     \
         * contains a completion barrier prior to the LL, so we don't   \
         * need to emit an extra one here.                              \
         */                                                             \
        if (__SYNC_loongson3_war == 0)                                  \
                smp_mb__before_llsc();                                  \
                                                                        \
        __res = arch_cmpxchg_local((ptr), (old), (new));                \
                                                                        \
        /*                                                              \
         * In the Loongson3 workaround case __cmpxchg_asm() already     \
         * contains a completion barrier after the SC, so we don't      \
         * need to emit an extra one here.                              \
         */                                                             \
        if (__SYNC_loongson3_war == 0)                                  \
                smp_llsc_mb();                                          \
                                                                        \
        __res;                                                          \
})
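/*
 * Illustrative compare-and-swap retry loop (hypothetical names, sketch only;
 * generic kernel code should normally use cmpxchg() or the atomic_* API):
 *
 *	unsigned int old, new;
 *
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (arch_cmpxchg(&counter, old, new) != old);
 *
 * The loop retries until the value observed in 'old' is still current at the
 * moment of the store, making the increment atomic.
 */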

#ifdef CONFIG_64BIT
#define arch_cmpxchg64_local(ptr, o, n)                                 \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        arch_cmpxchg_local((ptr), (o), (n));                            \
  })

#define arch_cmpxchg64(ptr, o, n)                                       \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        arch_cmpxchg((ptr), (o), (n));                                  \
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
                                        unsigned long long old,
                                        unsigned long long new)
{
        unsigned long long tmp, ret;
        unsigned long flags;

        /*
         * The assembly below has to combine 32 bit values into a 64 bit
         * register, and split 64 bit values from one register into two. If we
         * were to take an interrupt in the middle of this we'd only save the
         * least significant 32 bits of each register & probably clobber the
         * most significant 32 bits of the 64 bit values we're using. In order
         * to avoid this we must disable interrupts.
         */
        local_irq_save(flags);

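        /*
         * Note on operand modifiers: with a 64-bit value held in a 32-bit
         * register pair, %Ln selects the register holding the low-order word
         * of operand n and %Mn the one holding the high-order word.
         */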
        asm volatile(
        "       .set    push                            \n"
        "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"
        /* Load 64 bits from ptr */
        "       " __SYNC(full, loongson3_war) "         \n"
        "1:     lld     %L0, %3         # __cmpxchg64   \n"
        "       .set    pop                             \n"
        /*
         * Split the 64 bit value we loaded into the 2 registers that hold the
         * ret variable.
         */
        "       dsra    %M0, %L0, 32                    \n"
        "       sll     %L0, %L0, 0                     \n"
        /*
         * Compare ret against old, breaking out of the loop if they don't
         * match.
         */
        "       bne     %M0, %M4, 2f                    \n"
        "       bne     %L0, %L4, 2f                    \n"
        /*
         * Combine the 32 bit halves from the 2 registers that hold the new
         * variable into a single 64 bit register.
         */
#  if MIPS_ISA_REV >= 2
        "       move    %L1, %L5                        \n"
        "       dins    %L1, %M5, 32, 32                \n"
#  else
        "       dsll    %L1, %L5, 32                    \n"
        "       dsrl    %L1, %L1, 32                    \n"
        "       .set    noat                            \n"
        "       dsll    $at, %M5, 32                    \n"
        "       or      %L1, %L1, $at                   \n"
        "       .set    at                              \n"
#  endif
        "       .set    push                            \n"
        "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"
        /* Attempt to store new at ptr */
        "       scd     %L1, %2                         \n"
        /* If we failed, loop! */
        "\t" __stringify(SC_BEQZ) "     %L1, 1b         \n"
        "2:     " __SYNC(full, loongson3_war) "         \n"
        "       .set    pop                             \n"
        : "=&r"(ret),
          "=&r"(tmp),
          "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
        : GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
          "r" (old),
          "r" (new)
        : "memory");

        local_irq_restore(flags);
        return ret;
}

#  define arch_cmpxchg64(ptr, o, n) ({                                  \
        unsigned long long __old = (__typeof__(*(ptr)))(o);             \
        unsigned long long __new = (__typeof__(*(ptr)))(n);             \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * We can only use cmpxchg64 if we know that the CPU supports   \
         * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported  \
         * will cause a build error unless cpu_has_64bits is a          \
         * compile-time constant 1.                                     \
         */                                                             \
        if (cpu_has_64bits && kernel_uses_llsc) {                       \
                smp_mb__before_llsc();                                  \
                __res = __cmpxchg64((ptr), __old, __new);               \
                smp_llsc_mb();                                          \
        } else {                                                        \
                __res = __cmpxchg64_unsupported();                      \
        }                                                               \
                                                                        \
        __res;                                                          \
})
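/*
 * Illustrative use (hypothetical names, sketch only): updating a 64-bit
 * counter from a 32-bit SMP kernel running on LL/SC-capable CPUs:
 *
 *	u64 old, new;
 *
 *	do {
 *		old = READ_ONCE(stat64);
 *		new = old + 1;
 *	} while (arch_cmpxchg64(&stat64, old, new) != old);
 *
 * A torn read of 'old' on a 32-bit kernel is harmless here: the compare in
 * __cmpxchg64() will simply fail and the loop retries.
 */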

# else /* !CONFIG_SMP */
#  define arch_cmpxchg64(ptr, o, n) arch_cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#endif /* __ASM_CMPXCHG_H */
