/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two which return
	 * a value (the new and the old value, respectively)
	 * and do the barriers.
	 */

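/* ATOMIC_OP(op) generates atomic_<op>(): a lduw/cas retry loop that
 * applies 'op' to a 32-bit counter and returns nothing.  When the cas
 * loses a race, the loaded and swapped values differ and the code
 * branches to the backoff spin at label 2 on SMP (or straight back to
 * the load on UP).  Roughly equivalent C, for illustration only:
 *
 *	void atomic_op(int i, atomic_t *v)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = READ_ONCE(v->counter);
 *			new = old <op> i;
 *		} while (cmpxchg(&v->counter, old, new) != old);
 *	}
 */
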
#define ATOMIC_OP(op)							\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op);							\
EXPORT_SYMBOL(atomic_##op);

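/* ATOMIC_OP_RETURN(op) generates atomic_<op>_return(): the same cas retry
 * loop, but the new value is recomputed into %g1 in the branch delay slot
 * and returned in %o0, sign-extended from 32 bits by the sra in the retl
 * delay slot.
 */
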
#define ATOMIC_OP_RETURN(op)						\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op##_return);						\
EXPORT_SYMBOL(atomic_##op##_return);

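/* ATOMIC_FETCH_OP(op) generates atomic_fetch_<op>(): the same loop, but the
 * value returned in %o0 is the old (pre-op) counter value that was loaded
 * into %g1, again sign-extended in the retl delay slot.
 */
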
#define ATOMIC_FETCH_OP(op)						\
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_fetch_##op);						\
EXPORT_SYMBOL(atomic_fetch_##op);

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

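/* The 64-bit variants follow the same structure, but operate on a 64-bit
 * counter with ldx/casx and test %xcc, so no sign extension is needed on
 * the return path.
 */
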
#define ATOMIC64_OP(op)							\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op);							\
EXPORT_SYMBOL(atomic64_##op);

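/* atomic64_<op>_return(): returns the new 64-bit value, recomputed
 * directly into %o0 in the retl delay slot.
 */
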
#define ATOMIC64_OP_RETURN(op)						\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op##_return);					\
EXPORT_SYMBOL(atomic64_##op##_return);

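/* atomic64_fetch_<op>(): returns the old 64-bit value, moved from %g1
 * into %o0 in the retl delay slot.
 */
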
#define ATOMIC64_FETCH_OP(op)						\
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_fetch_##op);						\
EXPORT_SYMBOL(atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

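/* atomic64_dec_if_positive(v): decrement *v only when the stored value is
 * positive; in all cases return the loaded value minus one, so a
 * non-positive result tells the caller that no decrement was performed.
 * Roughly equivalent C, for illustration only:
 *
 *	s64 atomic64_dec_if_positive(atomic64_t *v)
 *	{
 *		s64 old;
 *
 *		do {
 *			old = READ_ONCE(v->counter);
 *			if (old <= 0)
 *				break;
 *		} while (cmpxchg(&v->counter, old, old - 1) != old);
 *		return old - 1;
 *	}
 */
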
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
EXPORT_SYMBOL(atomic64_dec_if_positive)