x86, AVX-512: Enable AVX-512 States Context Switch
cascardo/linux.git: arch/x86/include/asm/xsave.h
#ifndef __ASM_X86_XSAVE_H
#define __ASM_X86_XSAVE_H

#include <linux/types.h>
#include <asm/processor.h>

#define XSTATE_CPUID            0x0000000d

#define XSTATE_FP               0x1
#define XSTATE_SSE              0x2
#define XSTATE_YMM              0x4
#define XSTATE_BNDREGS          0x8
#define XSTATE_BNDCSR           0x10
#define XSTATE_OPMASK           0x20
#define XSTATE_ZMM_Hi256        0x40
#define XSTATE_Hi16_ZMM         0x80

#define XSTATE_FPSSE    (XSTATE_FP | XSTATE_SSE)

#define FXSAVE_SIZE     512

#define XSAVE_HDR_SIZE      64
#define XSAVE_HDR_OFFSET    FXSAVE_SIZE

#define XSAVE_YMM_SIZE      256
#define XSAVE_YMM_OFFSET    (XSAVE_HDR_SIZE + XSAVE_HDR_OFFSET)
/* Supported features that can use lazy state saving */
#define XSTATE_LAZY     (XSTATE_FP | XSTATE_SSE | XSTATE_YMM                  \
                        | XSTATE_OPMASK | XSTATE_ZMM_Hi256 | XSTATE_Hi16_ZMM)

/* Supported features that require eager state saving */
#define XSTATE_EAGER    (XSTATE_BNDREGS | XSTATE_BNDCSR)

/* All currently supported features */
#define XCNTXT_MASK     (XSTATE_LAZY | XSTATE_EAGER)
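
/*
 * Illustrative worked example (not part of the original header): with the
 * definitions above,
 *   XSTATE_LAZY  = 0x1|0x2|0x4|0x20|0x40|0x80 = 0xe7
 *   XSTATE_EAGER = 0x8|0x10                   = 0x18
 *   XCNTXT_MASK  = 0xe7|0x18                  = 0xff
 * i.e. all eight feature bits currently defined.  A hypothetical helper
 * for testing a feature against such a mask might look like this:
 */
static inline bool xstate_in_mask(u64 mask, u64 feature)
{
        return (mask & feature) == feature;
}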

#ifdef CONFIG_X86_64
#define REX_PREFIX      "0x48, "
#else
#define REX_PREFIX
#endif
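
/*
 * Decoding note (illustrative, assumes an assembler new enough to know
 * the XSAVE instruction family): the hand-assembled opcodes used below
 * correspond to
 *
 *   .byte 0x48, 0x0f,0xae,0x27   ==  xsave64    (%rdi)
 *   .byte 0x48, 0x0f,0xae,0x2f   ==  xrstor64   (%rdi)
 *   .byte 0x48, 0x0f,0xae,0x37   ==  xsaveopt64 (%rdi)
 *
 * On 64-bit the 0x48 REX.W prefix selects the 64-bit instruction forms;
 * the fixed modrm byte hardwires the memory operand to (%rdi)/(%edi),
 * which is why every function below pins the buffer with the "D"
 * constraint.
 */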

extern unsigned int xstate_size;
extern u64 pcntxt_mask;
extern u64 xstate_fx_sw_bytes[USER_XSTATE_FX_SW_WORDS];
extern struct xsave_struct *init_xstate_buf;

extern void xsave_init(void);
extern void update_regset_xstate_info(unsigned int size, u64 xstate_mask);
extern int init_fpu(struct task_struct *child);

/*
 * Restore the extended state in @fx with XRSTOR, requesting all features
 * (edx:eax = -1).  A faulting XRSTOR (e.g. on a corrupt xstate image) is
 * caught via the exception table and turned into a -1 return value.
 */
static inline int fpu_xrstor_checking(struct xsave_struct *fx)
{
        int err;

        asm volatile("1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n\t"
                     "2:\n"
                     ".section .fixup,\"ax\"\n"
                     "3:  movl $-1,%[err]\n"
                     "    jmp  2b\n"
                     ".previous\n"
                     _ASM_EXTABLE(1b, 3b)
                     : [err] "=r" (err)
                     : "D" (fx), "m" (*fx), "a" (-1), "d" (-1), "0" (0)
                     : "memory");

        return err;
}
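
/*
 * Illustrative sketch (hypothetical caller, not part of this header):
 * fpu_xrstor_checking() returns 0 on success and -1 if XRSTOR faulted,
 * so a caller can fall back gracefully instead of crashing on a bad
 * xstate image.
 */
static inline int example_safe_restore(struct xsave_struct *xsave)
{
        if (fpu_xrstor_checking(xsave))
                return -EINVAL; /* bad image: reinitialize the FPU state */
        return 0;
}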

static inline int xsave_user(struct xsave_struct __user *buf)
{
        int err;

        /*
         * Clear the xsave header first, so that reserved fields are
         * initialized to zero.
         */
        err = __clear_user(&buf->xsave_hdr, sizeof(buf->xsave_hdr));
        if (unlikely(err))
                return -EFAULT;

        __asm__ __volatile__(ASM_STAC "\n"
                             "1: .byte " REX_PREFIX "0x0f,0xae,0x27\n"
                             "2: " ASM_CLAC "\n"
                             ".section .fixup,\"ax\"\n"
                             "3:  movl $-1,%[err]\n"
                             "    jmp  2b\n"
                             ".previous\n"
                             _ASM_EXTABLE(1b,3b)
                             : [err] "=r" (err)
                             : "D" (buf), "a" (-1), "d" (-1), "0" (0)
                             : "memory");
        return err;
}
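
/*
 * Illustrative sketch (hypothetical helper, not part of this header):
 * saving the current task's extended state into a user buffer, as is
 * done when building a signal frame.
 */
static inline int example_save_to_user(struct xsave_struct __user *buf)
{
        /* any non-zero return from xsave_user() means the buffer faulted */
        return xsave_user(buf) ? -EFAULT : 0;
}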

static inline int xrestore_user(struct xsave_struct __user *buf, u64 mask)
{
        int err;
        struct xsave_struct *xstate = ((__force struct xsave_struct *)buf);
        u32 lmask = mask;
        u32 hmask = mask >> 32;

        __asm__ __volatile__(ASM_STAC "\n"
                             "1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n"
                             "2: " ASM_CLAC "\n"
                             ".section .fixup,\"ax\"\n"
                             "3:  movl $-1,%[err]\n"
                             "    jmp  2b\n"
                             ".previous\n"
                             _ASM_EXTABLE(1b,3b)
                             : [err] "=r" (err)
                             : "D" (xstate), "a" (lmask), "d" (hmask), "0" (0)
                             /*
                              * The buffer is read through a pointer rather
                              * than an "m" input, so keep the "memory"
                              * clobber to stop the compiler caching or
                              * reordering stores to it across the XRSTOR.
                              */
                             : "memory");
        return err;
}
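
/*
 * Illustrative sketch (not part of the original header): XSAVE/XRSTOR
 * take the 64-bit requested-feature bitmap in edx:eax, hence the
 * lmask/hmask split above; e.g. XSTATE_FP|XSTATE_SSE|XSTATE_YMM gives
 * eax = 0x7, edx = 0x0.
 */
static inline void example_split_mask(u64 mask, u32 *lmask, u32 *hmask)
{
        *lmask = mask;          /* low 32 bits  -> %eax */
        *hmask = mask >> 32;    /* high 32 bits -> %edx */
}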

static inline void xrstor_state(struct xsave_struct *fx, u64 mask)
{
        u32 lmask = mask;
        u32 hmask = mask >> 32;

        asm volatile(".byte " REX_PREFIX "0x0f,0xae,0x2f\n\t"
                     : : "D" (fx), "m" (*fx), "a" (lmask), "d" (hmask)
                     :   "memory");
}

static inline void xsave_state(struct xsave_struct *fx, u64 mask)
{
        u32 lmask = mask;
        u32 hmask = mask >> 32;

        asm volatile(".byte " REX_PREFIX "0x0f,0xae,0x27\n\t"
                     : : "D" (fx), "m" (*fx), "a" (lmask), "d" (hmask)
                     :   "memory");
}
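
/*
 * Illustrative usage (hypothetical caller, not part of this header):
 * passing the full pcntxt_mask saves every feature that was enabled in
 * XCR0 at boot.
 */
static inline void example_save_all(struct xsave_struct *xsave)
{
        xsave_state(xsave, pcntxt_mask);
}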

static inline void fpu_xsave(struct fpu *fpu)
{
        /*
         * The hand-assembled opcodes below hardwire the memory operand to
         * (%rdi)/(%edi), so force the compiler to select an addressing
         * mode that doesn't require extended registers via the "D"
         * constraint.  When the CPU supports XSAVEOPT, the alternative
         * patches in its opcode so that unmodified state components are
         * not written out.
         */
        alternative_input(
                ".byte " REX_PREFIX "0x0f,0xae,0x27",
                ".byte " REX_PREFIX "0x0f,0xae,0x37",
                X86_FEATURE_XSAVEOPT,
                [fx] "D" (&fpu->state->xsave), "a" (-1), "d" (-1) :
                "memory");
}
#endif /* __ASM_X86_XSAVE_H */