TOMOYO Linux Cross Reference
Linux/arch/s390/include/asm/cmpxchg.h


/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

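/*
 * __xchg_called_with_bad_pointer() is declared but, following the usual
 * kernel idiom, apparently never defined: if __arch_xchg() is reached with
 * an unsupported operand size, the call survives optimization and the build
 * fails at link time instead of silently doing the wrong thing.
 */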
void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__arch_xchg(unsigned long x, unsigned long address, int size)
{
        unsigned long old;
        int shift;

        switch (size) {
        case 1:
                shift = (3 ^ (address & 3)) << 3;
                address ^= address & 3;
                asm volatile(
                        "       l       %0,%1\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%1\n"
                        "       jl      0b\n"
                        : "=&d" (old), "+Q" (*(int *) address)
                        : "d" ((x & 0xff) << shift), "d" (~(0xff << shift))
                        : "memory", "cc", "0");
                return old >> shift;
        case 2:
                shift = (2 ^ (address & 2)) << 3;
                address ^= address & 2;
                asm volatile(
                        "       l       %0,%1\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%1\n"
                        "       jl      0b\n"
                        : "=&d" (old), "+Q" (*(int *) address)
                        : "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift))
                        : "memory", "cc", "0");
                return old >> shift;
        case 4:
                asm volatile(
                        "       l       %0,%1\n"
                        "0:     cs      %0,%2,%1\n"
                        "       jl      0b\n"
                        : "=&d" (old), "+Q" (*(int *) address)
                        : "d" (x)
                        : "memory", "cc");
                return old;
        case 8:
                asm volatile(
                        "       lg      %0,%1\n"
                        "0:     csg     %0,%2,%1\n"
                        "       jl      0b\n"
                        : "=&d" (old), "+QS" (*(long *) address)
                        : "d" (x)
                        : "memory", "cc");
                return old;
        }
        __xchg_called_with_bad_pointer();
        return x;
}
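
/*
 * Illustrative sketch, not part of this header: the 1- and 2-byte cases
 * above emulate xchg with the word-sized CS (compare and swap) instruction.
 * "shift = (3 ^ (address & 3)) << 3" is the bit position of the byte inside
 * its containing big-endian word, and "address ^= address & 3" rounds the
 * address down to a 4-byte boundary. The same idea in plain C, written
 * against a hypothetical __example_cas32() primitive that returns true if
 * the swap succeeded, might look like this:
 */
#if 0
static unsigned char example_xchg_byte(unsigned char *p, unsigned char val)
{
        unsigned long addr = (unsigned long)p;
        unsigned int *word = (unsigned int *)(addr & ~3UL);     /* containing aligned word */
        int shift = (3 ^ (addr & 3)) << 3;                      /* bit offset of the byte lane */
        unsigned int mask = ~(0xffU << shift);                  /* clears the byte lane */
        unsigned int old, new;

        do {
                old = *word;
                new = (old & mask) | ((unsigned int)val << shift);
        } while (!__example_cas32(word, old, new));             /* hypothetical 32-bit CAS */
        return old >> shift;
}
#endif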

#define arch_xchg(ptr, x)                                               \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
                                                                        \
        __ret = (__typeof__(*(ptr)))                                    \
                __arch_xchg((unsigned long)(x), (unsigned long)(ptr),   \
                            sizeof(*(ptr)));                            \
        __ret;                                                          \
})
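
/*
 * Illustrative usage sketch (hypothetical caller, not part of this header):
 * arch_xchg() stores the new value and returns whatever was there before,
 * for any of the four supported operand sizes.
 */
#if 0
static unsigned long example_take_pending(unsigned long *pending)
{
        /* Atomically fetch the current word and leave 0 behind. */
        return arch_xchg(pending, 0UL);
}
#endif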

void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long __cmpxchg(unsigned long address,
                                               unsigned long old,
                                               unsigned long new, int size)
{
        switch (size) {
        case 1: {
                unsigned int prev, shift, mask;

                shift = (3 ^ (address & 3)) << 3;
                address ^= address & 3;
                old = (old & 0xff) << shift;
                new = (new & 0xff) << shift;
                mask = ~(0xff << shift);
                asm volatile(
                        "       l       %[prev],%[address]\n"
                        "       nr      %[prev],%[mask]\n"
                        "       xilf    %[mask],0xffffffff\n"
                        "       or      %[new],%[prev]\n"
                        "       or      %[prev],%[tmp]\n"
                        "0:     lr      %[tmp],%[prev]\n"
                        "       cs      %[prev],%[new],%[address]\n"
                        "       jnl     1f\n"
                        "       xr      %[tmp],%[prev]\n"
                        "       xr      %[new],%[tmp]\n"
                        "       nr      %[tmp],%[mask]\n"
                        "       jz      0b\n"
                        "1:"
                        : [prev] "=&d" (prev),
                          [address] "+Q" (*(int *)address),
                          [tmp] "+&d" (old),
                          [new] "+&d" (new),
                          [mask] "+&d" (mask)
                        :: "memory", "cc");
                return prev >> shift;
        }
        case 2: {
                unsigned int prev, shift, mask;

                shift = (2 ^ (address & 2)) << 3;
                address ^= address & 2;
                old = (old & 0xffff) << shift;
                new = (new & 0xffff) << shift;
                mask = ~(0xffff << shift);
                asm volatile(
                        "       l       %[prev],%[address]\n"
                        "       nr      %[prev],%[mask]\n"
                        "       xilf    %[mask],0xffffffff\n"
                        "       or      %[new],%[prev]\n"
                        "       or      %[prev],%[tmp]\n"
                        "0:     lr      %[tmp],%[prev]\n"
                        "       cs      %[prev],%[new],%[address]\n"
                        "       jnl     1f\n"
                        "       xr      %[tmp],%[prev]\n"
                        "       xr      %[new],%[tmp]\n"
                        "       nr      %[tmp],%[mask]\n"
                        "       jz      0b\n"
                        "1:"
                        : [prev] "=&d" (prev),
                          [address] "+Q" (*(int *)address),
                          [tmp] "+&d" (old),
                          [new] "+&d" (new),
                          [mask] "+&d" (mask)
                        :: "memory", "cc");
                return prev >> shift;
        }
        case 4: {
                unsigned int prev = old;

                asm volatile(
                        "       cs      %[prev],%[new],%[address]\n"
                        : [prev] "+&d" (prev),
                          [address] "+Q" (*(int *)address)
                        : [new] "d" (new)
                        : "memory", "cc");
                return prev;
        }
        case 8: {
                unsigned long prev = old;

                asm volatile(
                        "       csg     %[prev],%[new],%[address]\n"
                        : [prev] "+&d" (prev),
                          [address] "+QS" (*(long *)address)
                        : [new] "d" (new)
                        : "memory", "cc");
                return prev;
        }
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}
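
/*
 * Illustrative sketch, not part of this header: the 1- and 2-byte cases of
 * __cmpxchg() build a full word from the expected/new byte plus the bytes
 * currently surrounding it, then CS that word. If CS fails only because the
 * neighbouring bytes changed (the target byte still holds the expected
 * value), the word is rebuilt and the CS retried; if the target byte itself
 * differs, the operation fails and the observed byte is returned. Roughly
 * the same logic in portable C, using a hypothetical __example_cas32_val()
 * that returns the previous memory contents:
 */
#if 0
static unsigned char example_cmpxchg_byte(unsigned char *p,
                                          unsigned char expected,
                                          unsigned char desired)
{
        unsigned long addr = (unsigned long)p;
        unsigned int *word = (unsigned int *)(addr & ~3UL);
        int shift = (3 ^ (addr & 3)) << 3;
        unsigned int lane = 0xffU << shift;             /* bits of the target byte */
        unsigned int cur, old, new;

        cur = *word;
        for (;;) {
                old = (cur & ~lane) | ((unsigned int)expected << shift);
                new = (cur & ~lane) | ((unsigned int)desired << shift);
                cur = __example_cas32_val(word, old, new);
                if (cur == old)                         /* swap succeeded */
                        break;
                if ((cur ^ old) & lane)                 /* target byte changed: give up */
                        break;
                /* only the neighbouring bytes moved: retry with fresh contents */
        }
        return cur >> shift;
}
#endif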

#define arch_cmpxchg(ptr, o, n)                                         \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
                                                                        \
        __ret = (__typeof__(*(ptr)))                                    \
                __cmpxchg((unsigned long)(ptr), (unsigned long)(o),     \
                          (unsigned long)(n), sizeof(*(ptr)));          \
        __ret;                                                          \
})
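
/*
 * Illustrative usage sketch (hypothetical caller, not part of this header):
 * the usual compare-and-swap retry loop. arch_cmpxchg() returns the value
 * actually found at *ptr, so the loop repeats until that matches the value
 * the update was computed from.
 */
#if 0
static int example_atomic_add(int *counter, int delta)
{
        int old, new, prev;

        old = *counter;
        for (;;) {
                new = old + delta;
                prev = arch_cmpxchg(counter, old, new);
                if (prev == old)        /* nobody raced with us */
                        return new;
                old = prev;             /* somebody did: retry from their value */
        }
}
#endif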

#define arch_cmpxchg64          arch_cmpxchg
#define arch_cmpxchg_local      arch_cmpxchg
#define arch_cmpxchg64_local    arch_cmpxchg

#define system_has_cmpxchg128()         1
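
/*
 * The cmpxchg64 and *_local aliases above reuse the fully serializing
 * implementation: unsigned long is already 64 bits wide here, and no cheaper
 * CPU-local variant is provided. CDSG (compare double and swap) appears to
 * be available on every supported 64-bit machine, which is presumably why
 * system_has_cmpxchg128() can be a constant 1.
 */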

static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
{
        asm volatile(
                "       cdsg    %[old],%[new],%[ptr]\n"
                : [old] "+d" (old), [ptr] "+QS" (*ptr)
                : [new] "d" (new)
                : "memory", "cc");
        return old;
}

#define arch_cmpxchg128         arch_cmpxchg128
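
/*
 * Illustrative usage sketch (hypothetical caller, not part of this header):
 * a 128-bit compare-and-swap retry loop. The pointed-to u128 should be
 * 16-byte aligned, since CDSG expects a quadword-aligned operand.
 */
#if 0
static u128 example_add128(u128 *v, u128 delta)
{
        u128 old, prev;

        old = *v;
        for (;;) {
                prev = arch_cmpxchg128(v, old, old + delta);
                if (prev == old)
                        return old + delta;
                old = prev;     /* raced: retry with the observed value */
        }
}
#endif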

#endif /* __ASM_CMPXCHG_H */
