/* SPDX-License-Identifier: GPL-2.0 */
#ifndef TOOLS_ASM_X86_CMPXCHG_H
#define TOOLS_ASM_X86_CMPXCHG_H

#include <linux/compiler.h>

/*
 * Non-existent functions to indicate usage errors at link time
 * (or at compile time if the compiler implements __compiletime_error()).
 */
extern void __cmpxchg_wrong_size(void)
	__compiletime_error("Bad argument size for cmpxchg");

/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making those switch
 * case statements guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B	1
#define __X86_CASE_W	2
#define __X86_CASE_L	4
#ifdef __x86_64__
#define __X86_CASE_Q	8
#else
#define __X86_CASE_Q	-1		/* sizeof will never return -1 */
#endif
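
/*
 * Illustrative sketch, not part of the original header (variable names
 * are hypothetical): on a 32-bit build a 64-bit operand matches none of
 * the case labels above (__X86_CASE_Q is -1 there), so only the default
 * branch of the switch in __raw_cmpxchg() below survives, and the
 * reference to __cmpxchg_wrong_size() flags the mistake at compile or
 * link time:
 *
 *	u64 val, old, new;
 *
 *	cmpxchg(&val, old, new);	// "Bad argument size for cmpxchg"
 */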

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)		\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (old);			\
	__typeof__(*(ptr)) __new = (new);			\
	switch (size) {						\
	case __X86_CASE_B:					\
	{							\
		volatile u8 *__ptr = (volatile u8 *)(ptr);	\
		asm volatile(lock "cmpxchgb %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "q" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	case __X86_CASE_W:					\
	{							\
		volatile u16 *__ptr = (volatile u16 *)(ptr);	\
		asm volatile(lock "cmpxchgw %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "r" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	case __X86_CASE_L:					\
	{							\
		volatile u32 *__ptr = (volatile u32 *)(ptr);	\
		asm volatile(lock "cmpxchgl %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "r" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	case __X86_CASE_Q:					\
	{							\
		volatile u64 *__ptr = (volatile u64 *)(ptr);	\
		asm volatile(lock "cmpxchgq %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "r" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	default:						\
		__cmpxchg_wrong_size();				\
	}							\
	__ret;							\
})

#define __cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define cmpxchg(ptr, old, new)					\
	__cmpxchg(ptr, old, new, sizeof(*(ptr)))
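
/*
 * Illustrative usage sketch, not part of the original header (the
 * function and variable names are hypothetical): a typical
 * compare-and-swap retry loop built on cmpxchg():
 *
 *	static u32 counter;
 *
 *	static void counter_inc(void)
 *	{
 *		u32 old, new;
 *
 *		do {
 *			old = counter;
 *			new = old + 1;
 *		} while (cmpxchg(&counter, old, new) != old);
 *	}
 */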

#endif /* TOOLS_ASM_X86_CMPXCHG_H */