/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

/* The synchronize caches instruction executes as a nop on systems in
   which all memory references are performed in order. */
#define synchronize_caches() __asm__ __volatile__ ("sync" : : : "memory")

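/*
 * With CONFIG_SMP every full barrier expands to synchronize_caches();
 * uniprocessor builds fall back to a plain compiler barrier.
 */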
#if defined(CONFIG_SMP)
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

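/* The SMP-conditional barrier variants all map to the full barrier. */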
#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()

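/*
 * __smp_store_release(p, v): publish v to *p with a single size-matched
 * store.  The union gives a type-correct view of the value,
 * compiletime_assert_atomic_type() rejects sizes that cannot be stored
 * in one access, and the "memory" clobber keeps the compiler from
 * moving earlier accesses past the store.
 *
 * A minimal usage sketch, with a hypothetical data/flag pair (not part
 * of this file), paired with __smp_load_acquire() below:
 *
 *	data = compute();			// writer
 *	__smp_store_release(&flag, 1);
 *
 *	if (__smp_load_acquire(&flag))		// reader
 *		use(data);
 */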
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)

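/*
 * __smp_load_acquire(p): read *p with a single size-matched load and
 * return the value via the union; the "memory" clobber keeps the
 * compiler from hoisting later accesses above the load.
 */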
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("ldb,ma 0(%1),%0"				\
				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("ldh,ma 0(%1),%0"				\
				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("ldw,ma 0(%1),%0"				\
				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("ldd,ma 0(%1),%0"			\
				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
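/*
 * The generic header fills in the remaining barrier definitions
 * (including the smp_store_release()/smp_load_acquire() wrappers) from
 * the primitives defined above.
 */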
#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */