TOMOYO Linux Cross Reference
Linux/arch/xtensa/include/asm/bitops.h

/*
 * include/asm-xtensa/bitops.h
 *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2007 Tensilica Inc.
 */

#ifndef _XTENSA_BITOPS_H
#define _XTENSA_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm/processor.h>
#include <asm/byteorder.h>
#include <asm/barrier.h>

#include <asm-generic/bitops/non-atomic.h>

#if XCHAL_HAVE_NSA

static inline unsigned long __cntlz (unsigned long x)
{
        int lz;
        asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
        return lz;
}

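/*
 * Note: nsau (normalization shift amount, unsigned) yields the number of
 * leading zero bits of its operand, and 32 when the operand is 0.  The
 * helpers below turn that count into the usual bit indices, e.g.
 * __cntlz(1) == 31 and __cntlz(0x80000000) == 0.
 */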
/*
 * ffz: Find first zero in word. Undefined if no zero exists.
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

static inline int ffz(unsigned long x)
{
        return 31 - __cntlz(~x & -~x);
}

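/*
 * Example: for x = 0x0000ffff the lowest clear bit is bit 16;
 * ~x & -~x isolates that bit (0x00010000), __cntlz() of it is 15,
 * so ffz() returns 31 - 15 = 16.
 */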
/*
 * __ffs: Find first bit set in word. Return 0 for bit 0
 */

static inline unsigned long __ffs(unsigned long x)
{
        return 31 - __cntlz(x & -x);
}

/*
 * ffs: Find first bit set in word. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */

static inline int ffs(unsigned long x)
{
        return 32 - __cntlz(x & -x);
}

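/*
 * Example: for x = 0x18 (bits 3 and 4 set) __ffs() returns 3 while
 * ffs() returns 4; ffs(0) is 0 because __cntlz(0) is 32.
 */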
/*
 * fls: Find last (most-significant) bit set in word.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static inline int fls (unsigned int x)
{
        return 32 - __cntlz(x);
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
        return 31 - __cntlz(word);
}
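/*
 * Example: __fls(1) == 0 and __fls(0x80000000) == 31, i.e. __fls()
 * returns a bit number while fls() above returns a 1-based position.
 */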
#else

/* Use the generic implementation if we don't have the nsa/nsau instructions. */

# include <asm-generic/bitops/ffs.h>
# include <asm-generic/bitops/__ffs.h>
# include <asm-generic/bitops/ffz.h>
# include <asm-generic/bitops/fls.h>
# include <asm-generic/bitops/__fls.h>

#endif

#include <asm-generic/bitops/fls64.h>

#if XCHAL_HAVE_EXCLUSIVE

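/*
 * With the exclusive access option the read-modify-write loop below is
 * built from l32ex (exclusive load) and s32ex (conditional store);
 * getex retrieves the result of the previous s32ex into the register
 * (nonzero on success), so beqz retries until the store goes through.
 */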
#define BIT_OP(op, insn, inv)                                           \
static inline void arch_##op##_bit(unsigned int bit, volatile unsigned long *p)\
{                                                                       \
        unsigned long tmp;                                              \
        unsigned long mask = 1UL << (bit & 31);                         \
                                                                        \
        p += bit >> 5;                                                  \
                                                                        \
        __asm__ __volatile__(                                           \
                        "1:     l32ex   %[tmp], %[addr]\n"              \
                        "      "insn"   %[tmp], %[tmp], %[mask]\n"      \
                        "       s32ex   %[tmp], %[addr]\n"              \
                        "       getex   %[tmp]\n"                       \
                        "       beqz    %[tmp], 1b\n"                   \
                        : [tmp] "=&a" (tmp)                             \
                        : [mask] "a" (inv mask), [addr] "a" (p)         \
                        : "memory");                                    \
}

#define TEST_AND_BIT_OP(op, insn, inv)                                  \
static inline int                                                       \
arch_test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)   \
{                                                                       \
        unsigned long tmp, value;                                       \
        unsigned long mask = 1UL << (bit & 31);                         \
                                                                        \
        p += bit >> 5;                                                  \
                                                                        \
        __asm__ __volatile__(                                           \
                        "1:     l32ex   %[value], %[addr]\n"            \
                        "      "insn"   %[tmp], %[value], %[mask]\n"    \
                        "       s32ex   %[tmp], %[addr]\n"              \
                        "       getex   %[tmp]\n"                       \
                        "       beqz    %[tmp], 1b\n"                   \
                        : [tmp] "=&a" (tmp), [value] "=&a" (value)      \
                        : [mask] "a" (inv mask), [addr] "a" (p)         \
                        : "memory");                                    \
                                                                        \
        return value & mask;                                            \
}

#elif XCHAL_HAVE_S32C1I

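/*
 * Without exclusive loads/stores the loop is built around s32c1i
 * (compare-and-swap): scompare1 holds the expected old value, s32c1i
 * stores the new value only if memory still matches scompare1 and
 * always returns the value that was actually in memory, so bne retries
 * when another CPU changed the word in the meantime.
 */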
#define BIT_OP(op, insn, inv)                                           \
static inline void arch_##op##_bit(unsigned int bit, volatile unsigned long *p)\
{                                                                       \
        unsigned long tmp, value;                                       \
        unsigned long mask = 1UL << (bit & 31);                         \
                                                                        \
        p += bit >> 5;                                                  \
                                                                        \
        __asm__ __volatile__(                                           \
                        "1:     l32i    %[value], %[mem]\n"             \
                        "       wsr     %[value], scompare1\n"          \
                        "      "insn"   %[tmp], %[value], %[mask]\n"    \
                        "       s32c1i  %[tmp], %[mem]\n"               \
                        "       bne     %[tmp], %[value], 1b\n"         \
                        : [tmp] "=&a" (tmp), [value] "=&a" (value),     \
                          [mem] "+m" (*p)                               \
                        : [mask] "a" (inv mask)                         \
                        : "memory");                                    \
}

#define TEST_AND_BIT_OP(op, insn, inv)                                  \
static inline int                                                       \
arch_test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)   \
{                                                                       \
        unsigned long tmp, value;                                       \
        unsigned long mask = 1UL << (bit & 31);                         \
                                                                        \
        p += bit >> 5;                                                  \
                                                                        \
        __asm__ __volatile__(                                           \
                        "1:     l32i    %[value], %[mem]\n"             \
                        "       wsr     %[value], scompare1\n"          \
                        "      "insn"   %[tmp], %[value], %[mask]\n"    \
                        "       s32c1i  %[tmp], %[mem]\n"               \
                        "       bne     %[tmp], %[value], 1b\n"         \
                        : [tmp] "=&a" (tmp), [value] "=&a" (value),     \
                          [mem] "+m" (*p)                               \
                        : [mask] "a" (inv mask)                         \
                        : "memory");                                    \
                                                                        \
        return tmp & mask;                                              \
}

#else

#define BIT_OP(op, insn, inv)
#define TEST_AND_BIT_OP(op, insn, inv)

#include <asm-generic/bitops/atomic.h>

#endif /* XCHAL_HAVE_S32C1I */

#define BIT_OPS(op, insn, inv)          \
        BIT_OP(op, insn, inv)           \
        TEST_AND_BIT_OP(op, insn, inv)

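/*
 * Expanding the helpers below generates arch_set_bit()/arch_clear_bit()/
 * arch_change_bit() and the corresponding arch_test_and_*_bit() variants;
 * the "inv" argument prepends ~ to the mask so that "and" clears the
 * selected bit.
 */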
BIT_OPS(set, "or", )
BIT_OPS(clear, "and", ~)
BIT_OPS(change, "xor", )

#undef BIT_OPS
#undef BIT_OP
#undef TEST_AND_BIT_OP

#include <asm-generic/bitops/instrumented-atomic.h>

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>

#endif  /* _XTENSA_BITOPS_H */
