1#ifndef _ASM_X86_SPECIAL_INSNS_H
2#define _ASM_X86_SPECIAL_INSNS_H
3
4
5#ifdef __KERNEL__
6
/* Clear CR0.TS ("task switched") with the CLTS instruction. */
static inline void native_clts(void)
{
	asm volatile("clts");
}
11
/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance.  The solution is
 * to use a variable and mimic reads and writes to it to enforce
 * serialization.
 */
19extern unsigned long __force_order;
20
/*
 * Read %cr0.  The dummy "=m" (__force_order) output mimics a store to
 * __force_order so the compiler cannot reorder this access against the
 * other control-register accessors (see the comment above __force_order).
 */
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}
27
/*
 * Write @val to %cr0.  The "m" (__force_order) input serializes this
 * against the other control-register accessors without a full memory
 * clobber (see the comment above __force_order).
 */
static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}
32
/*
 * Read %cr2 (page-fault linear address).  The dummy __force_order output
 * orders this read against the other control-register accessors.
 */
static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}
39
/* Write @val to %cr2; the __force_order input enforces ordering. */
static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}
44
/*
 * Read %cr3 (page-table base).  The dummy __force_order output orders
 * this read against the other control-register accessors.
 */
static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}
51
/* Write @val to %cr3; the __force_order input enforces ordering. */
static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}
56
/*
 * Read %cr4 unconditionally.  May fault on CPUs without %cr4 — callers
 * that must tolerate that use native_read_cr4_safe() instead.
 */
static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}
63
/*
 * Read %cr4, tolerating CPUs where the register does not exist.
 * Returns 0 if the read faults (32-bit only); on 64-bit it is a plain
 * native_read_cr4().
 */
static inline unsigned long native_read_cr4_safe(void)
{
	unsigned long val;
	/* This could fault if %cr4 does not exist. In x86_64, a cr4 always
	 * exists, so it will never fail. */
#ifdef CONFIG_X86_32
	/*
	 * If the mov at 1: faults, the exception-table entry resumes
	 * execution at 2:, and val keeps the 0 that the "0" (0) input
	 * constraint preloaded into the output register.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	val = native_read_cr4();
#endif
	return val;
}
79
/* Write @val to %cr4; the __force_order input enforces ordering. */
static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}
84
85#ifdef CONFIG_X86_64
/*
 * Read %cr8 (task-priority register, 64-bit only).
 * NOTE(review): unlike the cr0-cr4 readers, this has no __force_order
 * dependency — confirm that ordering against other CR accesses is not
 * required for %cr8.
 */
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}
92
/*
 * Write @val to %cr8 (64-bit only).  Uses a full "memory" clobber for
 * ordering rather than the __force_order variable the cr0-cr4 writers use.
 */
static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
97#endif
98
/*
 * Write back and invalidate all cache lines (WBINVD).  The "memory"
 * clobber keeps the compiler from caching memory values across it.
 */
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}
103
104extern asmlinkage void native_load_gs_index(unsigned);
105
106#ifdef CONFIG_PARAVIRT
107#include <asm/paravirt.h>
108#else
109
/* Non-paravirt build: %cr0 read goes straight to the native accessor. */
static inline unsigned long read_cr0(void)
{
	unsigned long cr0 = native_read_cr0();

	return cr0;
}
114
/* Non-paravirt build: %cr0 write goes straight to the native accessor. */
static inline void write_cr0(unsigned long x)
{
	unsigned long new_cr0 = x;

	native_write_cr0(new_cr0);
}
119
/* Non-paravirt build: %cr2 read goes straight to the native accessor. */
static inline unsigned long read_cr2(void)
{
	unsigned long cr2 = native_read_cr2();

	return cr2;
}
124
/* Non-paravirt build: %cr2 write goes straight to the native accessor. */
static inline void write_cr2(unsigned long x)
{
	unsigned long new_cr2 = x;

	native_write_cr2(new_cr2);
}
129
/* Non-paravirt build: %cr3 read goes straight to the native accessor. */
static inline unsigned long read_cr3(void)
{
	unsigned long cr3 = native_read_cr3();

	return cr3;
}
134
/* Non-paravirt build: %cr3 write goes straight to the native accessor. */
static inline void write_cr3(unsigned long x)
{
	unsigned long new_cr3 = x;

	native_write_cr3(new_cr3);
}
139
/* Non-paravirt build: %cr4 read goes straight to the native accessor. */
static inline unsigned long read_cr4(void)
{
	unsigned long cr4 = native_read_cr4();

	return cr4;
}
144
/* Non-paravirt build: fault-tolerant %cr4 read via the native helper. */
static inline unsigned long read_cr4_safe(void)
{
	unsigned long cr4 = native_read_cr4_safe();

	return cr4;
}
149
/* Non-paravirt build: %cr4 write goes straight to the native accessor. */
static inline void write_cr4(unsigned long x)
{
	unsigned long new_cr4 = x;

	native_write_cr4(new_cr4);
}
154
/* Non-paravirt build: write back and invalidate caches natively. */
static inline void wbinvd(void)
{
	native_wbinvd();
}
159
160#ifdef CONFIG_X86_64
161
/* Non-paravirt build: %cr8 read goes straight to the native accessor. */
static inline unsigned long read_cr8(void)
{
	unsigned long cr8 = native_read_cr8();

	return cr8;
}
166
/* Non-paravirt build: %cr8 write goes straight to the native accessor. */
static inline void write_cr8(unsigned long x)
{
	unsigned long new_cr8 = x;

	native_write_cr8(new_cr8);
}
171
/* Non-paravirt build: load @selector into GS via the native asm helper. */
static inline void load_gs_index(unsigned selector)
{
	unsigned sel = selector;

	native_load_gs_index(sel);
}
176
177#endif
178
/* Clear the 'TS' bit in %cr0 (non-paravirt build: native CLTS). */
static inline void clts(void)
{
	native_clts();
}
184
185#endif/* CONFIG_PARAVIRT */
186
/* Set CR0.TS ("task switched") via a read-modify-write of %cr0. */
#define stts() write_cr0(read_cr0() | X86_CR0_TS)
188
/*
 * Flush the cache line containing @__p (CLFLUSH).  The "+m" read-write
 * memory operand makes the compiler flush pending stores to *__p before
 * the instruction and not cache its value across it.
 */
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}
193
/*
 * Flush the cache line containing @__p, preferring CLFLUSHOPT when the
 * CPU has X86_FEATURE_CLFLUSHOPT.  alternative_io() patches at boot:
 * the default is CLFLUSH padded with a DS-prefix byte so both encodings
 * are the same length; the alternative is the 0x66-prefixed form, which
 * encodes CLFLUSHOPT.
 */
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}
201
/* Emit a single one-byte NOP instruction. */
#define nop() asm volatile ("nop")
203
204
205#endif /* __KERNEL__ */
206
207#endif /* _ASM_X86_SPECIAL_INSNS_H */
208