/* Atomic operations.  PowerPC Common version.
   Copyright (C) 2003-2016 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Paul Mackerras <paulus@au.ibm.com>, 2003.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
19
/*
 * Never include sysdeps/powerpc/atomic-machine.h directly.
 * Always use include/atomic.h which will include either
 * sysdeps/powerpc/powerpc32/atomic-machine.h
 * or
 * sysdeps/powerpc/powerpc64/atomic-machine.h
 * as appropriate and which in turn include this file.
 */
#include <stdint.h>

/* Canonical atomic operand types expected by include/atomic.h.
   32-bit exact-width and fast variants...  */
typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

/* ...64-bit exact-width and fast variants (64-bit operations are only
   usable on powerpc64; powerpc32 stubs them out elsewhere)...  */
typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

/* ...and pointer-width / maximum-width variants.  */
typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
45
/*
 * Powerpc does not have byte and halfword forms of load and reserve and
 * store conditional.  So for powerpc we stub out the 8- and 16-bit forms:
 * any runtime use aborts.  The ", 0" comma operand keeps each stub a
 * valid int-valued expression so callers still type-check.
 */
#define __arch_compare_and_exchange_bool_8_acq(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_16_acq(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_8_rel(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_16_rel(mem, newval, oldval) \
  (abort (), 0)
61
/* Barrier instructions spliced into the asm templates below.  On a
   uniprocessor build (UP) no ordering instructions are needed at all.
   Otherwise "isync" is used as the acquire barrier after a successful
   reservation loop, and "sync" as the release barrier before one; the
   #ifndef lets the including powerpc32/powerpc64 header predefine
   __ARCH_REL_INSTR to a lighter instruction where available (NOTE(review):
   presumably "lwsync" -- confirm against the subarch headers).  */
#ifdef UP
# define __ARCH_ACQ_INSTR	""
# define __ARCH_REL_INSTR	""
#else
# define __ARCH_ACQ_INSTR	"isync"
# ifndef __ARCH_REL_INSTR
#  define __ARCH_REL_INSTR	"sync"
# endif
#endif

/* Optional text appended to the lwarx in mutex acquire/release paths.
   Empty unless the including header defines a hardware hint suffix
   (NOTE(review): assumed to expand to a lwarx EH-hint operand when set
   -- confirm against the powerpc64 header).  */
#ifndef MUTEX_HINT_ACQ
# define MUTEX_HINT_ACQ
#endif
#ifndef MUTEX_HINT_REL
# define MUTEX_HINT_REL
#endif
78
/* Full memory barrier: "sync" orders all prior loads/stores before all
   later ones; the "memory" clobber also stops compiler reordering.  */
#define atomic_full_barrier()	__asm ("sync" ::: "memory")
80
/* 32-bit compare-and-exchange, acquire semantics: if *MEM == OLDVAL,
   store NEWVAL into *MEM.  Returns the value found in *MEM before the
   store attempt (equal to OLDVAL exactly when the store happened).
   Implemented as a lwarx/stwcx. reservation loop; "bne- 1b" retries if
   the reservation was lost.  The trailing __ARCH_ACQ_INSTR ("isync" on
   SMP) keeps later accesses from being performed before the exchange.
   "cr0" is clobbered by the record-form stwcx.; the "b" constraint keeps
   the address out of r0 (r0 reads as literal 0 in an address operand).
   MUTEX_HINT_ACQ is spliced onto the lwarx as an optional hint.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)	      \
  ({									      \
      __typeof (*(mem)) __tmp;						      \
      __typeof (mem) __memp = (mem);					      \
      __asm __volatile (						      \
		        "1: lwarx %0,0,%1" MUTEX_HINT_ACQ "\n"		      \
		        " cmpw %0,%2\n"					      \
		        " bne 2f\n"					      \
		        " stwcx. %3,0,%1\n"				      \
		        " bne- 1b\n"					      \
		        "2: " __ARCH_ACQ_INSTR				      \
		        : "=&r" (__tmp)					      \
		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
		        : "cr0", "memory");				      \
      __tmp;								      \
  })
97
/* 32-bit compare-and-exchange, release semantics: same contract as the
   _acq variant (returns the pre-existing *MEM value), but the barrier
   __ARCH_REL_INSTR ("sync", or lighter if predefined) executes BEFORE
   the reservation loop so all earlier accesses are performed before the
   store becomes visible; no barrier follows the loop.  */
#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval)	      \
  ({									      \
      __typeof (*(mem)) __tmp;						      \
      __typeof (mem) __memp = (mem);					      \
      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		        "1: lwarx %0,0,%1" MUTEX_HINT_REL "\n"		      \
		        " cmpw %0,%2\n"					      \
		        " bne 2f\n"					      \
		        " stwcx. %3,0,%1\n"				      \
		        " bne- 1b\n"					      \
		        "2: "						      \
		        : "=&r" (__tmp)					      \
		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
		        : "cr0", "memory");				      \
      __tmp;								      \
  })
114
/* Unconditionally store VALUE into 32-bit *MEM and return the previous
   contents, with acquire semantics (__ARCH_ACQ_INSTR after the
   reservation loop).  The "=m"/"m" operands tell the compiler *MEM is
   both read and written without forcing it into a register.  */
#define __arch_atomic_exchange_32_acq(mem, value)			      \
  ({									      \
    __typeof (*mem) __val;						      \
    __asm __volatile (							      \
		      "1: lwarx %0,0,%2" MUTEX_HINT_ACQ "\n"		      \
		      " stwcx. %3,0,%2\n"				      \
		      " bne- 1b\n"					      \
		      " " __ARCH_ACQ_INSTR				      \
		      : "=&r" (__val), "=m" (*mem)			      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
128
/* Unconditionally store VALUE into 32-bit *MEM and return the previous
   contents, with release semantics: __ARCH_REL_INSTR runs before the
   reservation loop, and no barrier follows it.  */
#define __arch_atomic_exchange_32_rel(mem, value)			      \
  ({									      \
    __typeof (*mem) __val;						      \
    __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		      "1: lwarx %0,0,%2" MUTEX_HINT_REL "\n"		      \
		      " stwcx. %3,0,%2\n"				      \
		      " bne- 1b"					      \
		      : "=&r" (__val), "=m" (*mem)			      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
141
/* Atomically add VALUE to 32-bit *MEM and return the OLD value (__val
   holds the lwarx result; the sum is built in __tmp and stored).  No
   barrier instructions: this variant is fully relaxed.  */
#define __arch_atomic_exchange_and_add_32(mem, value)			      \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile ("1: lwarx %0,0,%3\n"				      \
		      " add %1,%0,%4\n"					      \
		      " stwcx. %1,0,%3\n"				      \
		      " bne- 1b"					      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
154
/* Atomically add VALUE to 32-bit *MEM and return the OLD value, with
   acquire semantics (__ARCH_ACQ_INSTR after the reservation loop).  */
#define __arch_atomic_exchange_and_add_32_acq(mem, value)		      \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile ("1: lwarx %0,0,%3" MUTEX_HINT_ACQ "\n"		      \
		      " add %1,%0,%4\n"					      \
		      " stwcx. %1,0,%3\n"				      \
		      " bne- 1b\n"					      \
		      __ARCH_ACQ_INSTR					      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
168
/* Atomically add VALUE to 32-bit *MEM and return the OLD value, with
   release semantics (__ARCH_REL_INSTR before the reservation loop).  */
#define __arch_atomic_exchange_and_add_32_rel(mem, value)		      \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		      "1: lwarx %0,0,%3" MUTEX_HINT_REL "\n"		      \
		      " add %1,%0,%4\n"					      \
		      " stwcx. %1,0,%3\n"				      \
		      " bne- 1b"					      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
182
/* Atomically add 1 to 32-bit *MEM and return the NEW (incremented)
   value: the addi runs before the stwcx., so %0 holds the result.  No
   barriers (relaxed).  */
#define __arch_atomic_increment_val_32(mem)				      \
  ({									      \
    __typeof (*(mem)) __val;						      \
    __asm __volatile ("1: lwarx %0,0,%2\n"				      \
		      " addi %0,%0,1\n"					      \
		      " stwcx. %0,0,%2\n"				      \
		      " bne- 1b"					      \
		      : "=&b" (__val), "=m" (*mem)			      \
		      : "b" (mem), "m" (*mem)				      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
195
/* Atomically subtract 1 from 32-bit *MEM and return the NEW
   (decremented) value.  "subi" is the extended mnemonic for addi with a
   negated immediate.  No barriers (relaxed).  */
#define __arch_atomic_decrement_val_32(mem)				      \
  ({									      \
    __typeof (*(mem)) __val;						      \
    __asm __volatile ("1: lwarx %0,0,%2\n"				      \
		      " subi %0,%0,1\n"					      \
		      " stwcx. %0,0,%2\n"				      \
		      " bne- 1b"					      \
		      : "=&b" (__val), "=m" (*mem)			      \
		      : "b" (mem), "m" (*mem)				      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
208
/* If 32-bit *MEM > 0 atomically decrement it; in all cases return the
   OLD value (__val is the lwarx result; the candidate old-1 lives in
   __tmp and is only stored when the signed compare says old > 0).
   Acquire semantics via __ARCH_ACQ_INSTR at the exit label.  Note
   __val/__tmp are declared plain int rather than __typeof (*(mem)) --
   this macro is only reached for 4-byte operands.  */
#define __arch_atomic_decrement_if_positive_32(mem)			      \
  ({ int __val, __tmp;							      \
     __asm __volatile ("1: lwarx %0,0,%3\n"				      \
		       " cmpwi 0,%0,0\n"				      \
		       " addi %1,%0,-1\n"				      \
		       " ble 2f\n"					      \
		       " stwcx. %1,0,%3\n"				      \
		       " bne- 1b\n"					      \
		       "2: " __ARCH_ACQ_INSTR				      \
		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		       : "b" (mem), "m" (*mem)				      \
		       : "cr0", "memory");				      \
     __val;								      \
  })
223
/* Compare-and-exchange with acquire semantics: if *MEM == OLDVAL store
   NEWVAL; return the previous *MEM value.  Dispatches on operand size
   to the 32-/64-bit arch primitive (the 64-bit one comes from the
   including powerpc32/powerpc64 header); other sizes abort.  MEM is
   parenthesized in sizeof so expression arguments like P + I expand
   correctly, matching the __typeof (*(mem)) on the line above.  */
#define atomic_compare_and_exchange_val_acq(mem, newval, oldval)	      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_compare_and_exchange_val_32_acq(mem, newval, oldval); \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_compare_and_exchange_val_64_acq(mem, newval, oldval); \
    else								      \
      abort ();								      \
    __result;								      \
  })
235
/* Compare-and-exchange with release semantics: if *MEM == OLDVAL store
   NEWVAL; return the previous *MEM value.  Size-dispatches to the
   32-/64-bit arch primitive; other sizes abort.  MEM is parenthesized
   in sizeof so expression arguments expand correctly.  */
#define atomic_compare_and_exchange_val_rel(mem, newval, oldval)	      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_compare_and_exchange_val_32_rel(mem, newval, oldval); \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_compare_and_exchange_val_64_rel(mem, newval, oldval); \
    else								      \
      abort ();								      \
    __result;								      \
  })
247
/* Unconditionally store VALUE into *MEM with acquire semantics and
   return the previous contents.  Size-dispatches to the 32-/64-bit arch
   primitive; other sizes abort.  MEM is parenthesized in sizeof so
   expression arguments expand correctly.  */
#define atomic_exchange_acq(mem, value)					      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_32_acq (mem, value);		      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_64_acq (mem, value);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })
259
/* Unconditionally store VALUE into *MEM with release semantics and
   return the previous contents.  Size-dispatches to the 32-/64-bit arch
   primitive; other sizes abort.  MEM is parenthesized in sizeof so
   expression arguments expand correctly.  */
#define atomic_exchange_rel(mem, value)					      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_32_rel (mem, value);		      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_64_rel (mem, value);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })
271
/* Atomically add VALUE to *MEM and return the OLD value; relaxed (no
   barriers).  Size-dispatches to the 32-/64-bit arch primitive; other
   sizes abort.  MEM is parenthesized in sizeof so expression arguments
   expand correctly.  */
#define atomic_exchange_and_add(mem, value)				      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32 (mem, value);	      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64 (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Atomically add VALUE to *MEM and return the OLD value, with acquire
   semantics.  Size-dispatches to the 32-/64-bit arch primitive; other
   sizes abort.  MEM is parenthesized in sizeof so expression arguments
   expand correctly.  */
#define atomic_exchange_and_add_acq(mem, value)				      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32_acq (mem, value);	      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64_acq (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Atomically add VALUE to *MEM and return the OLD value, with release
   semantics.  Size-dispatches to the 32-/64-bit arch primitive; other
   sizes abort.  MEM is parenthesized in sizeof so expression arguments
   expand correctly.  */
#define atomic_exchange_and_add_rel(mem, value)				      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32_rel (mem, value);	      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64_rel (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
305
/* Atomically add 1 to *MEM and return the resulting (new) value.
   Dispatches on the operand size to the 32- or 64-bit arch primitive;
   any other size aborts.  */
#define atomic_increment_val(mem)					      \
  ({									      \
    __typeof (*(mem)) __res;						      \
    if (sizeof (*(mem)) == 8)						      \
      __res = __arch_atomic_increment_val_64 (mem);			      \
    else if (sizeof (*(mem)) == 4)					      \
      __res = __arch_atomic_increment_val_32 (mem);			      \
    else								      \
      abort ();								      \
    __res;								      \
  })
317
/* Atomically add 1 to *MEM, discarding the resulting value.  */
#define atomic_increment(mem) ({ (void) atomic_increment_val (mem); })
319
/* Atomically subtract 1 from *MEM and return the resulting (new) value.
   Dispatches on the operand size to the 32- or 64-bit arch primitive;
   any other size aborts.  */
#define atomic_decrement_val(mem)					      \
  ({									      \
    __typeof (*(mem)) __res;						      \
    if (sizeof (*(mem)) == 8)						      \
      __res = __arch_atomic_decrement_val_64 (mem);			      \
    else if (sizeof (*(mem)) == 4)					      \
      __res = __arch_atomic_decrement_val_32 (mem);			      \
    else								      \
      abort ();								      \
    __res;								      \
  })
331
/* Atomically subtract 1 from *MEM, discarding the resulting value.  */
#define atomic_decrement(mem) ({ (void) atomic_decrement_val (mem); })
333
334
/* Decrement *MEM if it is > 0, and return the old value (unchanged when
   no decrement occurred).  Size-dispatches to the 32-/64-bit arch
   primitive; other sizes abort.  MEM is parenthesized in sizeof so
   expression arguments expand correctly, consistent with the
   __typeof (*(mem)) declaration.  */
#define atomic_decrement_if_positive(mem)				      \
  ({ __typeof (*(mem)) __result;					      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_decrement_if_positive_32 (mem);		      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_decrement_if_positive_64 (mem);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })