/*
 * Copyright (c) 2013-2018 Ali Mashtizadeh
 * All rights reserved.
 */

#ifndef __AMD64OP_H__
#define __AMD64OP_H__

/*
 * Inline wrappers around low-level amd64 instructions: interrupt control,
 * time-stamp counters, descriptor tables, MSRs, control/debug/segment
 * registers, FPU state management, and port I/O.
 *
 * uint*_t comes from <stdint.h>; INLINE, PseudoDescriptor, and
 * struct XSAVEArea are assumed to be provided by the kernel's other headers.
 */
#include <stdint.h>

static INLINE void enable_interrupts()
{
    asm volatile("sti");
}

static INLINE void disable_interrupts()
{
    asm volatile("cli");
}

static INLINE void hlt()
{
    asm volatile("hlt");
}

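/*
 * pause is a spin-wait hint: it reduces power use and avoids the
 * memory-order mis-speculation penalty when leaving a busy-wait loop.
 */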
static INLINE void pause()
{
    asm volatile("pause");
}

static INLINE void breakpoint()
{
    asm volatile("int3");
}

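/*
 * icebp executes the undocumented 0xf1 opcode (INT1), which raises a #DB
 * debug exception; it is emitted as a raw byte because not all assemblers
 * accept a mnemonic for it.
 */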
static INLINE void icebp()
{
    asm volatile(".byte 0xf1");
}

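/*
 * rdtsc reads the 64-bit time-stamp counter into edx:eax; the instruction
 * is not serializing, so surrounding loads and stores may be reordered
 * around it.
 */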
static INLINE uint64_t rdtsc()
{
    uint32_t lo, hi;

    asm volatile("rdtsc"
                 : "=a" (lo), "=d" (hi));

    return ((uint64_t)hi << 32) | (uint64_t)lo;
}

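/*
 * rdtscp additionally returns IA32_TSC_AUX (normally the processor number)
 * in ecx and waits for all prior instructions to execute before reading
 * the counter.
 */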
static INLINE uint64_t rdtscp(uint32_t *procno)
{
    uint32_t lo, hi, proc;

    /* Use rdtscp (not rdtsc) so that ecx actually receives IA32_TSC_AUX. */
    asm volatile("rdtscp"
                 : "=a" (lo), "=d" (hi), "=c" (proc));

    if (procno)
        *procno = proc;

    return ((uint64_t)hi << 32) | (uint64_t)lo;
}

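/*
 * lidt/lgdt load the IDTR/GDTR from a pseudo-descriptor (16-bit limit plus
 * 64-bit base), and ltr loads the task register with a TSS selector.
 */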
static INLINE void lidt(PseudoDescriptor *idt)
{
    asm volatile("lidt (%0)"
                 :
                 : "r" (idt)
                 : "memory");
}

static INLINE void lgdt(PseudoDescriptor *gdt)
{
    asm volatile("lgdt (%0)"
                 :
                 : "r" (gdt)
                 : "memory");
}

static INLINE void ltr(uint16_t tss)
{
    asm volatile("ltr %0"
                 :
                 : "r" (tss));
}

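/*
 * cpuid queries the leaf selected by info (eax); any of the output pointers
 * may be NULL.  Note that this wrapper does not set the ecx subleaf that
 * some leaves require.
 */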
static INLINE void cpuid(uint32_t info, uint32_t *eax, uint32_t *ebx,
                         uint32_t *ecx, uint32_t *edx)
{
    uint32_t a, b, c, d;

    asm volatile("cpuid"
                 : "=a" (a), "=b" (b), "=c" (c), "=d" (d)
                 : "a" (info));

    if (eax)
        *eax = a;
    if (ebx)
        *ebx = b;
    if (ecx)
        *ecx = c;
    if (edx)
        *edx = d;
}

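/*
 * wrmsr/rdmsr access the model-specific register selected by ecx, with the
 * 64-bit value split across edx:eax.
 */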
static INLINE void wrmsr(uint32_t addr, uint64_t val)
{
    uint32_t eax = val & 0xFFFFFFFF;
    uint32_t edx = val >> 32;

    asm volatile("wrmsr"
                 :
                 : "a" (eax), "c" (addr), "d" (edx));
}

static INLINE uint64_t rdmsr(uint32_t addr)
{
    uint32_t lo, hi;

    asm volatile("rdmsr"
                 : "=a" (lo), "=d" (hi)
                 : "c" (addr));

    return ((uint64_t)hi << 32) | (uint64_t)lo;
}

/*
 * Control Registers
 */

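/*
 * cr0 holds system control flags (PE, MP, TS, PG, ...), cr2 the faulting
 * linear address after a page fault, cr3 the page-table base, and cr4
 * per-feature enable bits.
 */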
static INLINE uint64_t read_cr0()
{
    uint64_t val;

    asm volatile("movq %%cr0, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_cr0(uint64_t val)
{
    asm volatile("movq %0, %%cr0"
                 :
                 : "r" (val));
}

static INLINE uint64_t read_cr2()
{
    uint64_t val;

    asm volatile("movq %%cr2, %0"
                 : "=r" (val));

    return val;
}

static INLINE uint64_t read_cr3()
{
    uint64_t val;

    asm volatile("movq %%cr3, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_cr3(uint64_t val)
{
    asm volatile("movq %0, %%cr3"
                 :
                 : "r" (val));
}

static INLINE uint64_t read_cr4()
{
    uint64_t val;

    asm volatile("movq %%cr4, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_cr4(uint64_t val)
{
    asm volatile("movq %0, %%cr4"
                 :
                 : "r" (val));
}

/*
 * Debug Registers
 */

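/*
 * dr0-dr3 hold hardware breakpoint linear addresses, dr6 reports debug
 * status, and dr7 enables and configures the breakpoints.
 */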
static INLINE uint64_t read_dr0()
{
    uint64_t val;

    asm volatile("movq %%dr0, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_dr0(uint64_t val)
{
    asm volatile("movq %0, %%dr0"
                 :
                 : "r" (val));
}

static INLINE uint64_t read_dr1()
{
    uint64_t val;

    asm volatile("movq %%dr1, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_dr1(uint64_t val)
{
    asm volatile("movq %0, %%dr1"
                 :
                 : "r" (val));
}

static INLINE uint64_t read_dr2()
{
    uint64_t val;

    asm volatile("movq %%dr2, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_dr2(uint64_t val)
{
    asm volatile("movq %0, %%dr2"
                 :
                 : "r" (val));
}

static INLINE uint64_t read_dr3()
{
    uint64_t val;

    asm volatile("movq %%dr3, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_dr3(uint64_t val)
{
    asm volatile("movq %0, %%dr3"
                 :
                 : "r" (val));
}

static INLINE uint64_t read_dr6()
{
    uint64_t val;

    asm volatile("movq %%dr6, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_dr6(uint64_t val)
{
    asm volatile("movq %0, %%dr6"
                 :
                 : "r" (val));
}

static INLINE uint64_t read_dr7()
{
    uint64_t val;

    asm volatile("movq %%dr7, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_dr7(uint64_t val)
{
    asm volatile("movq %0, %%dr7"
                 :
                 : "r" (val));
}

/*
 * Segment Registers
 */

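/*
 * In 64-bit mode the ds/es segment bases are forced to zero; only fs and gs
 * keep a base, normally programmed via the IA32_FS_BASE/IA32_GS_BASE MSRs.
 */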
static INLINE uint16_t read_ds()
{
    uint16_t val;

    asm volatile("movw %%ds, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_ds(uint16_t val)
{
    asm volatile("movw %0, %%ds"
                 :
                 : "r" (val));
}

static INLINE uint16_t read_es()
{
    uint16_t val;

    asm volatile("movw %%es, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_es(uint16_t val)
{
    asm volatile("movw %0, %%es"
                 :
                 : "r" (val));
}

static INLINE uint16_t read_fs()
{
    uint16_t val;

    asm volatile("movw %%fs, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_fs(uint16_t val)
{
    asm volatile("movw %0, %%fs"
                 :
                 : "r" (val));
}

static INLINE uint16_t read_gs()
{
    uint16_t val;

    asm volatile("movw %%gs, %0"
                 : "=r" (val));

    return val;
}

static INLINE void write_gs(uint16_t val)
{
    asm volatile("movw %0, %%gs"
                 :
                 : "r" (val));
}

/*
 * Floating Point
 */

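/*
 * clts clears cr0.TS (used for lazy FPU switching); fxsave/fxrstor save and
 * restore the legacy x87/SSE state, while xsave/xsaveopt/xrstor handle the
 * extended state components selected by the edx:eax mask.
 */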
static INLINE void clts()
{
    asm volatile("clts");
}

static INLINE void fxsave(struct XSAVEArea *xsa)
{
    asm volatile("fxsave %0"
                 : "=m" (*xsa)
                 :
                 : "memory");
}

/*
 * XXX: Work around the AMD erratum where fxsave/fxrstor do not save or
 * restore the x87 exception pointers (FIP/FDP/FOP) unless an unmasked x87
 * exception is pending, which can leak those values across context switches.
 */
static INLINE void fxrstor(struct XSAVEArea *xsa)
{
    asm volatile("fxrstor %0"
                 :
                 : "m" (*xsa)
                 : "memory");
}

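/*
 * The mask selects which extended state components to save or restore; the
 * CPU ANDs it with XCR0, and xsaveopt may skip components that are unmodified
 * or in their initial state.
 */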
static INLINE void xsave(struct XSAVEArea *xsa, uint64_t mask)
{
    uint32_t lo = (uint32_t)mask;
    uint32_t hi = (uint32_t)(mask >> 32);

    asm volatile("xsave %0"
                 : "=m" (*xsa)
                 : "a" (lo), "d" (hi)
                 : "memory");
}

static INLINE void xsaveopt(struct XSAVEArea *xsa, uint64_t mask)
{
    uint32_t lo = (uint32_t)mask;
    uint32_t hi = (uint32_t)(mask >> 32);

    asm volatile("xsaveopt %0"
                 : "=m" (*xsa)
                 : "a" (lo), "d" (hi)
                 : "memory");
}

static INLINE void xrstor(struct XSAVEArea *xsa, uint64_t mask)
{
    uint32_t lo = (uint32_t)mask;
    uint32_t hi = (uint32_t)(mask >> 32);

    asm volatile("xrstor %0"
                 :
                 : "m" (*xsa), "a" (lo), "d" (hi)
                 : "memory");
}

/*
 * Port IO
 */

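/*
 * Legacy port I/O: the port number is passed in dx and the data in
 * al/ax/eax depending on the access width.
 */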
static INLINE void outb(uint16_t port, uint8_t data)
{
    asm volatile("outb %0, %1"
                 :
                 : "a" (data), "d" (port));
}

static INLINE void outw(uint16_t port, uint16_t data)
{
    asm volatile("outw %0, %1"
                 :
                 : "a" (data), "d" (port));
}

static INLINE void outl(uint16_t port, uint32_t data)
{
    asm volatile("outl %0, %1"
                 :
                 : "a" (data), "d" (port));
}

static INLINE uint8_t inb(uint16_t port)
{
    uint8_t data;

    asm volatile("inb %1, %0"
                 : "=a" (data)
                 : "d" (port));

    return data;
}

static INLINE uint16_t inw(uint16_t port)
{
    uint16_t data;

    asm volatile("inw %1, %0"
                 : "=a" (data)
                 : "d" (port));

    return data;
}

static INLINE uint32_t inl(uint16_t port)
{
    uint32_t data;

    asm volatile("inl %1, %0"
                 : "=a" (data)
                 : "d" (port));

    return data;
}

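/*
 * Example (illustrative only; assumes memcpy is available): reading the CPU
 * vendor string with cpuid().
 *
 *     char vendor[13];
 *     uint32_t b, c, d;
 *
 *     cpuid(0, NULL, &b, &c, &d);
 *     memcpy(&vendor[0], &b, 4);
 *     memcpy(&vendor[4], &d, 4);
 *     memcpy(&vendor[8], &c, 4);
 *     vendor[12] = '\0';
 */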
#endif /* __AMD64OP_H__ */