TOMOYO Linux Cross Reference
Linux/arch/loongarch/include/asm/cmpxchg.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bits.h>
#include <linux/build_bug.h>
#include <asm/barrier.h>

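/*
 * Note: __xchg_asm() emits a single LoongArch atomic memory operation:
 * amswap_db.w/amswap_db.d atomically stores @val to @m and returns the
 * old value, with the "_db" suffix providing full barrier semantics.
 * The "Jr" constraint together with the %z modifier lets a constant
 * zero @val be passed as register $zero.
 */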
#define __xchg_asm(amswap_db, m, val)           \
({                                              \
                __typeof(val) __ret;            \
                                                \
                __asm__ __volatile__ (          \
                " "amswap_db" %1, %z2, %0 \n"   \
                : "+ZB" (*m), "=&r" (__ret)     \
                : "Jr" (val)                    \
                : "memory");                    \
                                                \
                __ret;                          \
})

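/*
 * __xchg_small() emulates a 1- or 2-byte xchg with a 32-bit ll.w/sc.w
 * loop on the naturally aligned word containing the value. Worked
 * example: exchanging one byte at an address whose low two bits are 2
 * gives shift = 16 and mask = 0x00ff0000, so only bits 16..23 of the
 * aligned word are rewritten, and the old byte is recovered by masking
 * and shifting down.
 */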
static inline unsigned int __xchg_small(volatile void *ptr, unsigned int val,
                                        unsigned int size)
{
        unsigned int shift;
        u32 old32, mask, temp;
        volatile u32 *ptr32;

        /* Mask value to the correct size. */
        mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
        val &= mask;

        /*
         * Calculate a shift & mask that correspond to the value we wish to
         * exchange within the naturally aligned 4 byte integer that
         * includes it.
         */
        shift = (unsigned long)ptr & 0x3;
        shift *= BITS_PER_BYTE;
        mask <<= shift;

        /*
         * Calculate a pointer to the naturally aligned 4 byte integer that
         * includes our byte of interest, and load its value.
         */
        ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);

        asm volatile (
        "1:     ll.w            %0, %3          \n"
        "       andn            %1, %0, %z4     \n"
        "       or              %1, %1, %z5     \n"
        "       sc.w            %1, %2          \n"
        "       beqz            %1, 1b          \n"
        : "=&r" (old32), "=&r" (temp), "=ZC" (*ptr32)
        : "ZC" (*ptr32), "Jr" (mask), "Jr" (val << shift)
        : "memory");

        return (old32 & mask) >> shift;
}

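/*
 * __arch_xchg() dispatches on operand size: 1- and 2-byte objects go
 * through __xchg_small(), 4- and 8-byte objects use a single amswap_db
 * instruction, and any other size fails BUILD_BUG() at compile time.
 * Usage sketch for the arch_xchg() wrapper below (hypothetical caller,
 * not part of this header):
 *
 *      unsigned int prev = arch_xchg(&flag, 1U);
 */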
static __always_inline unsigned long
__arch_xchg(volatile void *ptr, unsigned long x, int size)
{
        switch (size) {
        case 1:
        case 2:
                return __xchg_small(ptr, x, size);

        case 4:
                return __xchg_asm("amswap_db.w", (volatile u32 *)ptr, (u32)x);

        case 8:
                return __xchg_asm("amswap_db.d", (volatile u64 *)ptr, (u64)x);

        default:
                BUILD_BUG();
        }

        return 0;
}

#define arch_xchg(ptr, x)                                               \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        __res = (__typeof__(*(ptr)))                                    \
                __arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
                                                                        \
        __res;                                                          \
})

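/*
 * __cmpxchg_asm() is a classic LL/SC loop: ll.w/ll.d loads the current
 * value and acquires the link, the store-conditional writes @new only
 * if the link still holds (leaving 1 in $t0 on success, 0 on failure,
 * which restarts the loop), and __WEAK_LLSC_MB from <asm/barrier.h>
 * supplies the ordering required at the exit label, in particular on
 * the path where the comparison fails and no store is executed.
 */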
#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(old) __ret;                                            \
                                                                        \
        __asm__ __volatile__(                                           \
        "1:     " ld "  %0, %2          # __cmpxchg_asm \n"             \
        "       bne     %0, %z3, 2f                     \n"             \
        "       move    $t0, %z4                        \n"             \
        "       " st "  $t0, %1                         \n"             \
        "       beqz    $t0, 1b                         \n"             \
        "2:                                             \n"             \
        __WEAK_LLSC_MB                                                  \
        : "=&r" (__ret), "=ZB"(*m)                                      \
        : "ZB"(*m), "Jr" (old), "Jr" (new)                              \
        : "t0", "memory");                                              \
                                                                        \
        __ret;                                                          \
})

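/*
 * __cmpxchg_small() applies the same containing-word technique as
 * __xchg_small() to 1- and 2-byte cmpxchg. For example, a 16-bit
 * cmpxchg at an address whose low two bits are 2 uses shift = 16 and
 * mask = 0xffff0000: bits 16..31 of the aligned word are compared
 * against the shifted @old and, on a match, replaced by the shifted
 * @new; a mismatch branches past the store-conditional.
 */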
static inline unsigned int __cmpxchg_small(volatile void *ptr, unsigned int old,
                                           unsigned int new, unsigned int size)
{
        unsigned int shift;
        u32 old32, mask, temp;
        volatile u32 *ptr32;

        /* Mask inputs to the correct size. */
        mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
        old &= mask;
        new &= mask;

        /*
         * Calculate a shift & mask that correspond to the value we wish to
         * compare & exchange within the naturally aligned 4 byte integer
         * that includes it.
         */
        shift = (unsigned long)ptr & 0x3;
        shift *= BITS_PER_BYTE;
        old <<= shift;
        new <<= shift;
        mask <<= shift;

        /*
         * Calculate a pointer to the naturally aligned 4 byte integer that
         * includes our byte of interest, and load its value.
         */
        ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);

        asm volatile (
        "1:     ll.w            %0, %3          \n"
        "       and             %1, %0, %z4     \n"
        "       bne             %1, %z5, 2f     \n"
        "       andn            %1, %0, %z4     \n"
        "       or              %1, %1, %z6     \n"
        "       sc.w            %1, %2          \n"
        "       beqz            %1, 1b          \n"
        "       b               3f              \n"
        "2:                                     \n"
        __WEAK_LLSC_MB
        "3:                                     \n"
        : "=&r" (old32), "=&r" (temp), "=ZC" (*ptr32)
        : "ZC" (*ptr32), "Jr" (mask), "Jr" (old), "Jr" (new)
        : "memory");

        return (old32 & mask) >> shift;
}

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, unsigned int size)
{
        switch (size) {
        case 1:
        case 2:
                return __cmpxchg_small(ptr, old, new, size);

        case 4:
                return __cmpxchg_asm("ll.w", "sc.w", (volatile u32 *)ptr,
                                     (u32)old, new);

        case 8:
                return __cmpxchg_asm("ll.d", "sc.d", (volatile u64 *)ptr,
                                     (u64)old, new);

        default:
                BUILD_BUG();
        }

        return 0;
}

#define arch_cmpxchg_local(ptr, old, new)                               \
        ((__typeof__(*(ptr)))                                           \
                __cmpxchg((ptr),                                        \
                          (unsigned long)(__typeof__(*(ptr)))(old),     \
                          (unsigned long)(__typeof__(*(ptr)))(new),     \
                          sizeof(*(ptr))))

#define arch_cmpxchg(ptr, old, new)                                     \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        __res = arch_cmpxchg_local((ptr), (old), (new));                \
                                                                        \
        __res;                                                          \
})
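/*
 * Usage sketch for arch_cmpxchg() (hypothetical caller): the usual
 * compare-and-swap retry loop for a lock-free increment.
 *
 *      u32 old, new;
 *      do {
 *              old = READ_ONCE(*counter);
 *              new = old + 1;
 *      } while (arch_cmpxchg(counter, old, new) != old);
 */

/*
 * On 64-bit kernels the 64-bit variants below map straight onto
 * arch_cmpxchg{,_local}(). Without CONFIG_64BIT there is no ll.d/sc.d
 * pair, so they fall back to the generic implementation in
 * <asm-generic/cmpxchg-local.h>, which disables interrupts around a
 * plain read-compare-write.
 */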
#ifdef CONFIG_64BIT
#define arch_cmpxchg64_local(ptr, o, n)                                 \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        arch_cmpxchg_local((ptr), (o), (n));                            \
  })

#define arch_cmpxchg64(ptr, o, n)                                       \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        arch_cmpxchg((ptr), (o), (n));                                  \
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
#define arch_cmpxchg64(ptr, o, n) arch_cmpxchg64_local((ptr), (o), (n))
#endif

#endif /* __ASM_CMPXCHG_H */
