/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Userspace memory-barrier shims (x86 only in this view).
 *
 * The virt_*() macros provide the weak (virtio-style) barriers; the plain
 * mb()/rmb()/wmb() are deliberately defined to abort() so that any caller
 * still using the strong-barrier names is caught at runtime.
 *
 * The __asm__/__volatile__/__typeof__ spellings are used instead of the
 * plain GNU keywords so the header also compiles under strict ISO modes
 * such as -std=c11, where `asm` and `typeof` are not recognized.
 */
#if defined(__i386__) || defined(__x86_64__)
/* Compiler-only barrier: forbids the compiler from reordering memory
 * accesses across this point; emits no machine instruction. */
#define barrier() __asm__ __volatile__("" ::: "memory")
/* Full memory barrier (compiler + CPU). */
#define virt_mb() __sync_synchronize()
/* On x86 a compiler barrier suffices for read/write ordering — the
 * hardware memory model (TSO) already keeps loads ordered with loads and
 * stores ordered with stores. */
#define virt_rmb() barrier()
#define virt_wmb() barrier()
/* Atomic store should be enough, but gcc generates worse code in that case. */
/* Store `value` into `var` with sequentially-consistent ordering.
 * Note: no semicolon after while (0) — the caller supplies it, so the
 * macro behaves as a single statement in if/else bodies. */
#define virt_store_mb(var, value) do { \
	__typeof__(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0)
/* Weak barriers should be used. If not - it's a bug */
# define mb() abort()
# define rmb() abort()
# define wmb() abort()
#else
#error Please fill in barrier macros
#endif