/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/export.h>
#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Three versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and two which return
	 * a value, the new and old value respectively, and do the
	 * barriers.
	 */

#define ATOMIC_OP(op)							\
ENTRY(arch_atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op);						\
EXPORT_SYMBOL(arch_atomic_##op);

#define ATOMIC_OP_RETURN(op)						\
ENTRY(arch_atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op##_return);					\
EXPORT_SYMBOL(arch_atomic_##op##_return);

#define ATOMIC_FETCH_OP(op)						\
ENTRY(arch_atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic_fetch_##op);

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
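
	/* All three macro families above use the same load / compute /
	 * compare-and-swap retry loop and differ only in what they return.
	 * A rough C sketch of one iteration (illustrative only; cas() here
	 * stands for the 32-bit CAS instruction, not a real kernel helper,
	 * and "op" is whichever of add/sub/and/or/xor was instantiated):
	 *
	 *	int old, new;
	 *	do {
	 *		old = *ptr;			// lduw
	 *		new = old op increment;		// op %g1, %o0, %g7
	 *	} while (cas(ptr, old, new) != old);	// cas + cmp + bne
	 *
	 * arch_atomic_<op>() returns nothing, arch_atomic_<op>_return()
	 * returns the new value, and arch_atomic_fetch_<op>() returns the
	 * old value; the latter two sign-extend the 32-bit result into
	 * %o0 via "sra %g1, 0, %o0".  A failed CAS branches to the
	 * BACKOFF_SPIN slow path at 2: before retrying from 1:.
	 */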

#define ATOMIC64_OP(op)							\
ENTRY(arch_atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op);						\
EXPORT_SYMBOL(arch_atomic64_##op);

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(arch_atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op##_return);					\
EXPORT_SYMBOL(arch_atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)						\
ENTRY(arch_atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
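
	/* The 64-bit routines above mirror the 32-bit ones, differing only
	 * in access width: ldx/casx instead of lduw/cas, and %xcc instead
	 * of %icc for the comparison.  arch_atomic64_<op>_return() computes
	 * the new value straight into %o0 in the retl delay slot, and
	 * arch_atomic64_fetch_<op>() returns the old value with a plain
	 * mov, since no sign extension is needed for a 64-bit result.
	 * The BACKOFF_* macros come from asm/backoff.h; on SMP builds they
	 * are meant to insert a bounded, exponentially growing delay
	 * between failed CAS attempts, while on UP builds they should
	 * reduce to an immediate retry.
	 */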

ENTRY(arch_atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(arch_atomic64_dec_if_positive)
EXPORT_SYMBOL(arch_atomic64_dec_if_positive)
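
	/* arch_atomic64_dec_if_positive() only stores when the old value
	 * is greater than zero, but always returns (old - 1), even when
	 * no decrement happened.  A rough C sketch of the loop above
	 * (illustrative only; casx() stands for the 64-bit compare-and-swap
	 * instruction, not a real kernel helper):
	 *
	 *	s64 old, new;
	 *	do {
	 *		old = *ptr;			// ldx
	 *		new = old - 1;
	 *		if (old <= 0)			// brlez,pn %g1, 3f
	 *			return new;		// nothing stored
	 *	} while (casx(ptr, old, new) != old);	// casx + cmp + bne
	 *	return new;
	 */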