/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
#ifndef _UAPI__ALPHA_COMPILER_H
#define _UAPI__ALPHA_COMPILER_H

/*
 * Herein are macros we use when describing various patterns we want
 * to express to GCC.  In all cases we can get better schedules out of
 * the compiler if we hide as little as possible inside inline assembly.
 * However, we want to be able to know what we'll get out before giving
 * up inline assembly.  Thus these tests and macros.
 */

/*
 * Byte-field insert/extract and byte-compare primitives (Alpha
 * INSxL/INSxH, EXTxL and CMPBGE instructions).
 *
 * GCC 3.4 and later expose these instructions as target builtins,
 * which the compiler can schedule freely; older compilers fall back
 * to equivalent single-instruction inline assembly.  Constraint "rI"
 * allows the shift to be a register or an Alpha 8-bit literal, and
 * "rJ" with "%r2" allows operand (a) to be the zero register.
 */
#if __GNUC__ == 3 && __GNUC_MINOR__ >= 4 || __GNUC__ > 3
# define __kernel_insbl(val, shift)	__builtin_alpha_insbl(val, shift)
# define __kernel_inswl(val, shift)	__builtin_alpha_inswl(val, shift)
# define __kernel_insql(val, shift)	__builtin_alpha_insql(val, shift)
# define __kernel_inslh(val, shift)	__builtin_alpha_inslh(val, shift)
# define __kernel_extbl(val, shift)	__builtin_alpha_extbl(val, shift)
# define __kernel_extwl(val, shift)	__builtin_alpha_extwl(val, shift)
# define __kernel_cmpbge(a, b)		__builtin_alpha_cmpbge(a, b)
#else
# define __kernel_insbl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("insbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_inswl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("inswl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_insql(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("insql %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_inslh(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("inslh %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_extbl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("extbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_extwl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("extwl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_cmpbge(a, b)						\
  ({ unsigned long __kir;						\
     __asm__("cmpbge %r2,%1,%0" : "=r"(__kir) : "rI"(b), "rJ"(a));	\
     __kir; })
#endif
/*
 * Count-trailing-zeros, count-leading-zeros and population count.
 * These are CIX-extension instructions (EV67 and later).
 *
 * When the compiler targets CIX (__alpha_cix__), GCC 3.4+ generic
 * builtins emit them directly; older CIX-targeting compilers use
 * plain inline assembly.  When the compiler does NOT target CIX,
 * the asm temporarily raises the assembler's architecture level
 * with ".arch ev67" so the mnemonics are accepted — the kernel
 * only runs these on CPUs that have them.
 */
#ifdef __alpha_cix__
# if __GNUC__ == 3 && __GNUC_MINOR__ >= 4 || __GNUC__ > 3
#  define __kernel_cttz(x)	__builtin_ctzl(x)
#  define __kernel_ctlz(x)	__builtin_clzl(x)
#  define __kernel_ctpop(x)	__builtin_popcountl(x)
# else
#  define __kernel_cttz(x)						\
   ({ unsigned long __kir;						\
      __asm__("cttz %1,%0" : "=r"(__kir) : "r"(x));			\
      __kir; })
#  define __kernel_ctlz(x)						\
   ({ unsigned long __kir;						\
      __asm__("ctlz %1,%0" : "=r"(__kir) : "r"(x));			\
      __kir; })
#  define __kernel_ctpop(x)						\
   ({ unsigned long __kir;						\
      __asm__("ctpop %1,%0" : "=r"(__kir) : "r"(x));			\
      __kir; })
# endif
#else
# define __kernel_cttz(x)						\
  ({ unsigned long __kir;						\
     __asm__(".arch ev67; cttz %1,%0" : "=r"(__kir) : "r"(x));		\
     __kir; })
# define __kernel_ctlz(x)						\
  ({ unsigned long __kir;						\
     __asm__(".arch ev67; ctlz %1,%0" : "=r"(__kir) : "r"(x));		\
     __kir; })
# define __kernel_ctpop(x)						\
  ({ unsigned long __kir;						\
     __asm__(".arch ev67; ctpop %1,%0" : "=r"(__kir) : "r"(x));		\
     __kir; })
#endif


/*
 * Beginning with EGCS 1.1, GCC defines __alpha_bwx__ when the BWX
 * extension is enabled.  Previous versions did not define anything
 * we could test during compilation -- too bad, so sad.
 */

/*
 * Byte/word loads and stores.
 *
 * With BWX (EV56+) the compiler emits LDBU/LDWU/STB/STW itself, so
 * the macros reduce to plain C accesses.  Without BWX, inline asm
 * raises the assembler's architecture level with ".arch ev56" to
 * force the real instructions; the backslash-continued asm strings
 * span lines but form a single assembly statement.  Note the store
 * macros are expression statements with no result value.
 */
#if defined(__alpha_bwx__)
#define __kernel_ldbu(mem)	(mem)
#define __kernel_ldwu(mem)	(mem)
#define __kernel_stb(val,mem)	((mem) = (val))
#define __kernel_stw(val,mem)	((mem) = (val))
#else
#define __kernel_ldbu(mem)				\
  ({ unsigned char __kir;				\
     __asm__(".arch ev56;				\
	      ldbu %0,%1" : "=r"(__kir) : "m"(mem));	\
     __kir; })
#define __kernel_ldwu(mem)				\
  ({ unsigned short __kir;				\
     __asm__(".arch ev56;				\
	      ldwu %0,%1" : "=r"(__kir) : "m"(mem));	\
     __kir; })
#define __kernel_stb(val,mem)				\
  __asm__(".arch ev56;					\
	   stb %1,%0" : "=m"(mem) : "r"(val))
#define __kernel_stw(val,mem)				\
  __asm__(".arch ev56;					\
	   stw %1,%0" : "=m"(mem) : "r"(val))
#endif


#endif /* _UAPI__ALPHA_COMPILER_H */