/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_KASAN_H
#define _ASM_X86_KASAN_H

#include <linux/const.h>
#define KASAN_SHADOW_OFFSET _AC(CONFIG_KASAN_SHADOW_OFFSET, UL)
#define KASAN_SHADOW_SCALE_SHIFT 3

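/*
 * Each shadow byte covers 2^KASAN_SHADOW_SCALE_SHIFT == 8 bytes of kernel
 * memory. A minimal sketch of the address-to-shadow mapping, for
 * illustration only (the generic helper lives in include/linux/kasan.h):
 *
 *	shadow = (void *)(((unsigned long)addr >> KASAN_SHADOW_SCALE_SHIFT)
 *				+ KASAN_SHADOW_OFFSET);
 */
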
/*
 * The compiler uses the shadow offset assuming that addresses start
 * from 0. Kernel addresses don't start from 0, so the shadow for the
 * kernel really starts from the compiler's shadow offset +
 * ('kernel address space start' >> KASAN_SHADOW_SCALE_SHIFT).
 */
#define KASAN_SHADOW_START	(KASAN_SHADOW_OFFSET + \
					((-1UL << __VIRTUAL_MASK_SHIFT) >> \
						KASAN_SHADOW_SCALE_SHIFT))
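/*
 * Worked example, assuming 4-level paging (__VIRTUAL_MASK_SHIFT == 47)
 * and the default x86_64 CONFIG_KASAN_SHADOW_OFFSET of 0xdffffc0000000000:
 *
 *	(-1UL << 47) >> 3  == 0x1ffff00000000000
 *	KASAN_SHADOW_START == 0xdffffc0000000000 + 0x1ffff00000000000
 *			   == 0xffffec0000000000
 */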
/*
 * 47 bits for kernel address -> (47 - KASAN_SHADOW_SCALE_SHIFT) bits for shadow
 * 56 bits for kernel address -> (56 - KASAN_SHADOW_SCALE_SHIFT) bits for shadow
 */
#define KASAN_SHADOW_END	(KASAN_SHADOW_START + \
					(1ULL << (__VIRTUAL_MASK_SHIFT - \
						  KASAN_SHADOW_SCALE_SHIFT)))
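/*
 * Per the comment above, the shadow region spans 2^(47 - 3) bytes (16 TiB)
 * with 4-level paging and 2^(56 - 3) bytes (8 PiB) with 5-level paging.
 */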

#ifndef __ASSEMBLY__

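/*
 * With CONFIG_KASAN disabled, the empty stubs below let callers invoke
 * kasan_early_init()/kasan_init() unconditionally, without #ifdef guards.
 */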
#ifdef CONFIG_KASAN
void __init kasan_early_init(void);
void __init kasan_init(void);
#else
static inline void kasan_early_init(void) { }
static inline void kasan_init(void) { }
#endif

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_X86_KASAN_H */