TOMOYO Linux Cross Reference
Linux/arch/sh/include/asm/atomic-llsc.h


/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_LLSC_H
#define __ASM_SH_ATOMIC_LLSC_H

/*
 * SH-4A note:
 *
 * We basically get atomic_xxx_return() for free compared with
 * atomic_xxx(). movli.l/movco.l require r0 due to the instruction
 * encoding, so the retval is automatically set without having to
 * do any special work.
 */
/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op)                                                   \
static inline void arch_atomic_##op(int i, atomic_t *v)                 \
{                                                                       \
        unsigned long tmp;                                              \
                                                                        \
        __asm__ __volatile__ (                                          \
"1:     movli.l @%2, %0         ! atomic_" #op "\n"                     \
"       " #op " %1, %0                          \n"                     \
"       movco.l %0, @%2                         \n"                     \
"       bf      1b                              \n"                     \
        : "=&z" (tmp)                                                   \
        : "r" (i), "r" (&v->counter)                                    \
        : "t");                                                         \
}

#define ATOMIC_OP_RETURN(op)                                            \
static inline int arch_atomic_##op##_return(int i, atomic_t *v)         \
{                                                                       \
        unsigned long temp;                                             \
                                                                        \
        __asm__ __volatile__ (                                          \
"1:     movli.l @%2, %0         ! atomic_" #op "_return \n"             \
"       " #op " %1, %0                                  \n"             \
"       movco.l %0, @%2                                 \n"             \
"       bf      1b                                      \n"             \
"       synco                                           \n"             \
        : "=&z" (temp)                                                  \
        : "r" (i), "r" (&v->counter)                                    \
        : "t");                                                         \
                                                                        \
        return temp;                                                    \
}

#define ATOMIC_FETCH_OP(op)                                             \
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)            \
{                                                                       \
        unsigned long res, temp;                                        \
                                                                        \
        __asm__ __volatile__ (                                          \
"1:     movli.l @%3, %0         ! atomic_fetch_" #op "  \n"             \
"       mov %0, %1                                      \n"             \
"       " #op " %2, %0                                  \n"             \
"       movco.l %0, @%3                                  \n"            \
"       bf      1b                                      \n"             \
"       synco                                           \n"             \
        : "=&z" (temp), "=&r" (res)                                     \
        : "r" (i), "r" (&v->counter)                                    \
        : "t");                                                         \
                                                                        \
        return res;                                                     \
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#define arch_atomic_add_return  arch_atomic_add_return
#define arch_atomic_sub_return  arch_atomic_sub_return
#define arch_atomic_fetch_add   arch_atomic_fetch_add
#define arch_atomic_fetch_sub   arch_atomic_fetch_sub

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#define arch_atomic_fetch_and   arch_atomic_fetch_and
#define arch_atomic_fetch_or    arch_atomic_fetch_or
#define arch_atomic_fetch_xor   arch_atomic_fetch_xor

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_LLSC_H */
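To make the macro machinery above concrete, the sketch below shows roughly what ATOMIC_OPS(add) produces for the plain and value-returning variants once the preprocessor has run. It is an illustrative expansion, not part of the header: it assumes the usual atomic_t type ({ int counter; }) from the generic atomic headers, and the surrounding comments are added here purely for explanation of the SH-4A load-linked/store-conditional sequence.

/*
 * Illustrative expansion of ATOMIC_OP(add): a "fire-and-forget" atomic add.
 * movli.l loads v->counter "linked" into r0 (the "=&z" constraint pins %0
 * to r0, which the instruction encoding requires), the add is performed in
 * r0, movco.l stores it back conditionally and sets the T bit on success,
 * and bf 1b retries the whole sequence if the conditional store failed.
 */
static inline void arch_atomic_add(int i, atomic_t *v)
{
        unsigned long tmp;

        __asm__ __volatile__ (
"1:     movli.l @%2, %0         ! atomic_add\n"
"       add %1, %0                          \n"
"       movco.l %0, @%2                         \n"
"       bf      1b                              \n"
        : "=&z" (tmp)
        : "r" (i), "r" (&v->counter)
        : "t");
}

/*
 * Illustrative expansion of ATOMIC_OP_RETURN(add).  Because the result of
 * the operation is already sitting in r0 (as the header comment notes),
 * it can be returned without any extra work; the trailing synco acts as
 * the full barrier expected of value-returning atomics on SH-4A.
 */
static inline int arch_atomic_add_return(int i, atomic_t *v)
{
        unsigned long temp;

        __asm__ __volatile__ (
"1:     movli.l @%2, %0         ! atomic_add_return \n"
"       add %1, %0                                  \n"
"       movco.l %0, @%2                                 \n"
"       bf      1b                                      \n"
"       synco                                           \n"
        : "=&z" (temp)
        : "r" (i), "r" (&v->counter)
        : "t");

        return temp;
}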
