TOMOYO Linux Cross Reference
Linux/arch/alpha/include/asm/bitops.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_BITOPS_H
#define _ALPHA_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm/compiler.h>
#include <asm/barrier.h>

/*
 * Copyright 1994, Linus Torvalds.
 */

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 *
 * bit 0 is the LSB of addr; bit 64 is the LSB of (addr+1).
 */

static inline void
set_bit(unsigned long nr, volatile void * addr)
{
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%3\n"
        "       bis %0,%2,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m)
        :"Ir" (1UL << (nr & 31)), "m" (*m));
}
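
/*
 * Editor's sketch (an addition, not part of the original header):
 * the asm above is Alpha's load-locked/store-conditional retry loop.
 * stl_c leaves 0 in %0 when the cache-line reservation was lost, and
 * the out-of-line "br 1b" restarts the sequence.  In portable GNU C
 * the same atomic OR could be written with a compiler builtin; the
 * helper name below is invented for illustration only:
 */
static inline void
example_set_bit_portable(unsigned long nr, volatile void *addr)
{
        int *m = ((int *) addr) + (nr >> 5);

        /* atomic fetch-OR; the builtin emits its own retry loop */
        __atomic_fetch_or(m, 1 << (nr & 31), __ATOMIC_RELAXED);
}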

/*
 * WARNING: non atomic version.
 */
static __always_inline void
arch___set_bit(unsigned long nr, volatile unsigned long *addr)
{
        int *m = ((int *) addr) + (nr >> 5);

        *m |= 1 << (nr & 31);
}

static inline void
clear_bit(unsigned long nr, volatile void * addr)
{
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%3\n"
        "       bic %0,%2,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m)
        :"Ir" (1UL << (nr & 31)), "m" (*m));
}

static inline void
clear_bit_unlock(unsigned long nr, volatile void * addr)
{
        smp_mb();
        clear_bit(nr, addr);
}
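
/*
 * Editor's note: the smp_mb() ahead of the clear gives
 * clear_bit_unlock() release semantics: every store made while the
 * bit was held is ordered before the bit becomes visible as clear,
 * which is what the *_unlock bitop contract requires.
 */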

/*
 * WARNING: non atomic version.
 */
static __always_inline void
arch___clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        int *m = ((int *) addr) + (nr >> 5);

        *m &= ~(1 << (nr & 31));
}

static inline void
__clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb();
        arch___clear_bit(nr, addr);
}

static inline void
change_bit(unsigned long nr, volatile void * addr)
{
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%3\n"
        "       xor %0,%2,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m)
        :"Ir" (1UL << (nr & 31)), "m" (*m));
}

/*
 * WARNING: non atomic version.
 */
static __always_inline void
arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
        int *m = ((int *) addr) + (nr >> 5);

        *m ^= 1 << (nr & 31);
}

static inline int
test_and_set_bit(unsigned long nr, volatile void *addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       bne %2,2f\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
        "2:\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}

static inline int
test_and_set_bit_lock(unsigned long nr, volatile void *addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       bne %2,2f\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
        "2:\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}
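
/*
 * Editor's note: unlike test_and_set_bit() above, which brackets the
 * operation with "mb" on SMP, the _lock variant issues "mb" only
 * afterwards, i.e. acquire rather than full-barrier ordering.  Paired
 * with clear_bit_unlock() that is enough to build a simple lock; a
 * minimal sketch (the function name is invented for illustration):
 */
static inline int
example_bit_trylock(volatile unsigned long *word)
{
        /* nonzero return means we took the lock (bit 0 was clear) */
        return !test_and_set_bit_lock(0, word);
}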

/*
 * WARNING: non atomic version.
 */
static __always_inline bool
arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long mask = 1 << (nr & 0x1f);
        int *m = ((int *) addr) + (nr >> 5);
        int old = *m;

        *m = old | mask;
        return (old & mask) != 0;
}

static inline int
test_and_clear_bit(unsigned long nr, volatile void * addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       beq %2,2f\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
        "2:\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}

/*
 * WARNING: non atomic version.
 */
static __always_inline bool
arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long mask = 1 << (nr & 0x1f);
        int *m = ((int *) addr) + (nr >> 5);
        int old = *m;

        *m = old & ~mask;
        return (old & mask) != 0;
}

static inline int
test_and_change_bit(unsigned long nr, volatile void * addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}

/*
 * WARNING: non atomic version.
 */
static __always_inline bool
arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long mask = 1 << (nr & 0x1f);
        int *m = ((int *) addr) + (nr >> 5);
        int old = *m;

        *m = old ^ mask;
        return (old & mask) != 0;
}

#define arch_test_bit generic_test_bit
#define arch_test_bit_acquire generic_test_bit_acquire

static inline bool xor_unlock_is_negative_byte(unsigned long mask,
                volatile unsigned long *p)
{
        unsigned long temp, old;

        __asm__ __volatile__(
        "1:     ldl_l %0,%4\n"
        "       mov %0,%2\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*p), "=&r" (old)
        :"Ir" (mask), "m" (*p));

        return (old & BIT(7)) != 0;
}
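
/*
 * Editor's note: this atomically XORs @mask into *p and reports
 * whether bit 7 of the low byte was set in the old value.  Hedged:
 * in recent kernels the expected caller is the page/folio unlock
 * path, which clears the lock bit and tests the waiters bit in a
 * single atomic operation; that usage lives outside this header.
 */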

/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 *
 * Do a binary search on the bits.  Due to the nature of large
 * constants on the alpha, it is worthwhile to split the search.
 */
static inline unsigned long ffz_b(unsigned long x)
{
        unsigned long sum, x1, x2, x4;

        x = ~x & -~x;           /* set first 0 bit, clear others */
        x1 = x & 0xAA;
        x2 = x & 0xCC;
        x4 = x & 0xF0;
        sum = x2 ? 2 : 0;
        sum += (x4 != 0) * 4;
        sum += (x1 != 0);

        return sum;
}
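
/*
 * Editor's worked example: for x = 0xfb (1111 1011, first zero at
 * bit 2), ~x & -~x isolates that zero as 0x04.  Then x1 = 0x04 & 0xAA
 * = 0 (index bit 0 clear), x2 = 0x04 & 0xCC = 0x04 (adds 2), and
 * x4 = 0x04 & 0xF0 = 0 (index < 4), so sum = 2.  The three masks read
 * off the three bits of the index in parallel.
 */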

static inline unsigned long ffz(unsigned long word)
{
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
        /* Whee.  EV67 can calculate it directly.  */
        return __kernel_cttz(~word);
#else
        unsigned long bits, qofs, bofs;

        bits = __kernel_cmpbge(word, ~0UL);
        qofs = ffz_b(bits);
        bits = __kernel_extbl(word, qofs);
        bofs = ffz_b(bits);

        return qofs*8 + bofs;
#endif
}
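
/*
 * Editor's worked example (non-EV67 path): for word = 0xffff,
 * __kernel_cmpbge(word, ~0UL) flags each all-ones byte, giving
 * bits = 0x03; ffz_b(0x03) = 2 picks byte 2 as the first byte
 * containing a zero; __kernel_extbl(word, 2) = 0x00 and
 * ffz_b(0x00) = 0, so ffz returns 2*8 + 0 = 16.
 */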

/*
 * __ffs = Find First set bit in word.  Undefined if no set bit exists.
 */
static inline unsigned long __ffs(unsigned long word)
{
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
        /* Whee.  EV67 can calculate it directly.  */
        return __kernel_cttz(word);
#else
        unsigned long bits, qofs, bofs;

        bits = __kernel_cmpbge(0, word);
        qofs = ffz_b(bits);
        bits = __kernel_extbl(word, qofs);
        bofs = ffz_b(~bits);

        return qofs*8 + bofs;
#endif
}
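
/*
 * Editor's worked example (non-EV67 path): for word = 0x18,
 * __kernel_cmpbge(0, word) flags the all-zero bytes, giving
 * bits = 0xfe; ffz_b(0xfe) = 0 selects byte 0, __kernel_extbl gives
 * 0x18, and ffz_b(~0x18) = 3, so __ffs returns 0*8 + 3: bit 3 is
 * the lowest set bit of 0x18.
 */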

#ifdef __KERNEL__

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above __ffs.
 */

static inline int ffs(int word)
{
        int result = __ffs(word) + 1;
        return word ? result : 0;
}
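
/*
 * Editor's note: result is computed unconditionally, but when
 * word == 0 the (undefined) __ffs(0) value is discarded by the
 * conditional, so ffs(0) still returns 0 as the API requires.
 */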

/*
 * fls: find last bit set.
 */
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
static inline int fls64(unsigned long word)
{
        return 64 - __kernel_ctlz(word);
}
#else
extern const unsigned char __flsm1_tab[256];

static inline int fls64(unsigned long x)
{
        unsigned long t, a, r;

        t = __kernel_cmpbge (x, 0x0101010101010101UL);
        a = __flsm1_tab[t];
        t = __kernel_extbl (x, a);
        r = a*8 + __flsm1_tab[t] + (x != 0);

        return r;
}
#endif
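
/*
 * Editor's worked example (table path, assuming __flsm1_tab[n]
 * holds fls(n) - 1): for x = 0x300, __kernel_cmpbge flags the
 * nonzero bytes, so t = 0x02 and a = 1 selects byte 1;
 * __kernel_extbl(x, 1) = 0x03, and r = 1*8 + 1 + 1 = 10, matching
 * fls64(0x300) = 10 (bit 9 is the highest set bit).
 */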

static inline unsigned long __fls(unsigned long x)
{
        return fls64(x) - 1;
}

static inline int fls(unsigned int x)
{
        return fls64(x);
}
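
/*
 * Editor's note: fls() can defer to fls64() because the unsigned int
 * argument zero-extends to 64 bits, leaving the upper half clear;
 * fls(0) still returns 0, as the bitops API requires.
 */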

/*
 * hweightN: returns the hamming weight (i.e. the number
 * of bits set) of a N-bit word
 */

#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
/* Whee.  EV67 can calculate it directly.  */
static inline unsigned long __arch_hweight64(unsigned long w)
{
        return __kernel_ctpop(w);
}

static inline unsigned int __arch_hweight32(unsigned int w)
{
        return __arch_hweight64(w);
}

static inline unsigned int __arch_hweight16(unsigned int w)
{
        return __arch_hweight64(w & 0xffff);
}

static inline unsigned int __arch_hweight8(unsigned int w)
{
        return __arch_hweight64(w & 0xff);
}
#else
#include <asm-generic/bitops/arch_hweight.h>
#endif

#include <asm-generic/bitops/const_hweight.h>

#endif /* __KERNEL__ */

#ifdef __KERNEL__

/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 100-bit bitmap.  It's guaranteed that at least
 * one of the 100 bits is set.
 */
static inline unsigned long
sched_find_first_bit(const unsigned long b[2])
{
        unsigned long b0, b1, ofs, tmp;

        b0 = b[0];
        b1 = b[1];
        ofs = (b0 ? 0 : 64);
        tmp = (b0 ? b0 : b1);

        return __ffs(tmp) + ofs;
}
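
/*
 * Editor's worked example: for b[0] = 0 and b[1] = 0x8, b0 is zero,
 * so ofs = 64 and tmp = b[1]; __ffs(0x8) = 3, and the function
 * returns 67.  The two ?: selects can compile to conditional moves
 * on Alpha, avoiding hard-to-predict branches.
 */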

#include <asm-generic/bitops/non-instrumented-non-atomic.h>

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */

#endif /* _ALPHA_BITOPS_H */
