source: vbox/trunk/include/iprt/asm-amd64-x86.h

1/** @file
2 * IPRT - AMD64 and x86 Specific Assembly Functions.
3 */
4
5/*
6 * Copyright (C) 2006-2015 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef ___iprt_asm_amd64_x86_h
27#define ___iprt_asm_amd64_x86_h
28
29#include <iprt/types.h>
30#include <iprt/assert.h>
31#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
32# error "Not on AMD64 or x86"
33#endif
34
35#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
36# include <intrin.h>
37 /* Emit the intrinsics at all optimization levels. */
38# pragma intrinsic(_ReadWriteBarrier)
39# pragma intrinsic(__cpuid)
40# pragma intrinsic(_enable)
41# pragma intrinsic(_disable)
42# pragma intrinsic(__rdtsc)
43# pragma intrinsic(__readmsr)
44# pragma intrinsic(__writemsr)
45# pragma intrinsic(__outbyte)
46# pragma intrinsic(__outbytestring)
47# pragma intrinsic(__outword)
48# pragma intrinsic(__outwordstring)
49# pragma intrinsic(__outdword)
50# pragma intrinsic(__outdwordstring)
51# pragma intrinsic(__inbyte)
52# pragma intrinsic(__inbytestring)
53# pragma intrinsic(__inword)
54# pragma intrinsic(__inwordstring)
55# pragma intrinsic(__indword)
56# pragma intrinsic(__indwordstring)
57# pragma intrinsic(__invlpg)
58# pragma intrinsic(__wbinvd)
59# pragma intrinsic(__readcr0)
60# pragma intrinsic(__readcr2)
61# pragma intrinsic(__readcr3)
62# pragma intrinsic(__readcr4)
63# pragma intrinsic(__writecr0)
64# pragma intrinsic(__writecr3)
65# pragma intrinsic(__writecr4)
66# pragma intrinsic(__readdr)
67# pragma intrinsic(__writedr)
68# ifdef RT_ARCH_AMD64
69# pragma intrinsic(__readcr8)
70# pragma intrinsic(__writecr8)
71# endif
72# if RT_INLINE_ASM_USES_INTRIN >= 15
73# pragma intrinsic(__readeflags)
74# pragma intrinsic(__writeeflags)
75# pragma intrinsic(__rdtscp)
76# endif
77#endif
78
79
80/*
81 * Include #pragma aux definitions for Watcom C/C++.
82 */
83#if defined(__WATCOMC__) && ARCH_BITS == 16
84# include "asm-amd64-x86-watcom-16.h"
85#elif defined(__WATCOMC__) && ARCH_BITS == 32
86# include "asm-amd64-x86-watcom-32.h"
87#endif
88
89
90/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
91 * @ingroup grp_rt_asm
92 * @{
93 */
94
95/** @todo find a more proper place for these structures? */
96
97#pragma pack(1)
98/** IDTR */
99typedef struct RTIDTR
100{
101 /** Size of the IDT. */
102 uint16_t cbIdt;
103 /** Address of the IDT. */
104#if ARCH_BITS != 64
105 uint32_t pIdt;
106#else
107 uint64_t pIdt;
108#endif
109} RTIDTR, *PRTIDTR;
110#pragma pack()
111
112#pragma pack(1)
113/** @internal */
114typedef struct RTIDTRALIGNEDINT
115{
116 /** Alignment padding. */
117 uint16_t au16Padding[ARCH_BITS == 64 ? 3 : 1];
118 /** The IDTR structure. */
119 RTIDTR Idtr;
120} RTIDTRALIGNEDINT;
121#pragma pack()
122
123/** Wrapped RTIDTR for preventing misalignment exceptions. */
124typedef union RTIDTRALIGNED
125{
126 /** Try to make sure this structure has optimal alignment. */
127 uint64_t auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
128 /** Aligned structure. */
129 RTIDTRALIGNEDINT s;
130} RTIDTRALIGNED;
131AssertCompileSize(RTIDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
132/** Pointer to an RTIDTR alignment wrapper. */
133typedef RTIDTRALIGNED *PRTIDTRALIGNED;
134
135
136#pragma pack(1)
137/** GDTR */
138typedef struct RTGDTR
139{
140 /** Size of the GDT. */
141 uint16_t cbGdt;
142 /** Address of the GDT. */
143#if ARCH_BITS != 64
144 uint32_t pGdt;
145#else
146 uint64_t pGdt;
147#endif
148} RTGDTR, *PRTGDTR;
149#pragma pack()
150
151#pragma pack(1)
152/** @internal */
153typedef struct RTGDTRALIGNEDINT
154{
155 /** Alignment padding. */
156 uint16_t au16Padding[ARCH_BITS == 64 ? 3 : 1];
157 /** The GDTR structure. */
158 RTGDTR Gdtr;
159} RTGDTRALIGNEDINT;
160#pragma pack()
161
162/** Wrapped RTGDTR for preventing misalignment exceptions. */
163typedef union RTGDTRALIGNED
164{
165 /** Try to make sure this structure has optimal alignment. */
166 uint64_t auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
167 /** Aligned structure. */
168 RTGDTRALIGNEDINT s;
169} RTGDTRALIGNED;
170AssertCompileSize(RTGDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
171/** Pointer to an RTGDTR alignment wrapper. */
172typedef RTGDTRALIGNED *PRTGDTRALIGNED;
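/*
 * Usage sketch (illustrative, not part of the original file): the aligned
 * wrappers above exist because SGDT/SIDT store a 16-bit limit followed by the
 * base, so a naturally aligned RTGDTR/RTIDTR would leave the base field
 * misaligned and can trip alignment checking. The padding puts the base on a
 * natural boundary; ASMGetIdtrLimit() below uses the same trick.
 *
 * @code
 *      RTGDTRALIGNED TmpGdtr;
 *      ASMGetGDTR(&TmpGdtr.s.Gdtr);
 *      uint16_t const cbGdt = TmpGdtr.s.Gdtr.cbGdt;
 * @endcode
 */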
173
174
175/**
176 * Gets the content of the IDTR CPU register.
177 * @param pIdtr Where to store the IDTR contents.
178 */
179#if RT_INLINE_ASM_EXTERNAL
180DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
181#else
182DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
183{
184# if RT_INLINE_ASM_GNU_STYLE
185 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
186# else
187 __asm
188 {
189# ifdef RT_ARCH_AMD64
190 mov rax, [pIdtr]
191 sidt [rax]
192# else
193 mov eax, [pIdtr]
194 sidt [eax]
195# endif
196 }
197# endif
198}
199#endif
200
201
202/**
203 * Gets the content of the IDTR.LIMIT CPU register.
204 * @returns IDTR limit.
205 */
206#if RT_INLINE_ASM_EXTERNAL
207DECLASM(uint16_t) ASMGetIdtrLimit(void);
208#else
209DECLINLINE(uint16_t) ASMGetIdtrLimit(void)
210{
211 RTIDTRALIGNED TmpIdtr;
212# if RT_INLINE_ASM_GNU_STYLE
213 __asm__ __volatile__("sidt %0" : "=m" (TmpIdtr.s.Idtr));
214# else
215 __asm
216 {
217 sidt [TmpIdtr.s.Idtr]
218 }
219# endif
220 return TmpIdtr.s.Idtr.cbIdt;
221}
222#endif
223
224
225/**
226 * Sets the content of the IDTR CPU register.
227 * @param pIdtr Where to load the IDTR contents from.
228 */
229#if RT_INLINE_ASM_EXTERNAL
230DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
231#else
232DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
233{
234# if RT_INLINE_ASM_GNU_STYLE
235 __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
236# else
237 __asm
238 {
239# ifdef RT_ARCH_AMD64
240 mov rax, [pIdtr]
241 lidt [rax]
242# else
243 mov eax, [pIdtr]
244 lidt [eax]
245# endif
246 }
247# endif
248}
249#endif
250
251
252/**
253 * Gets the content of the GDTR CPU register.
254 * @param pGdtr Where to store the GDTR contents.
255 */
256#if RT_INLINE_ASM_EXTERNAL
257DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
258#else
259DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
260{
261# if RT_INLINE_ASM_GNU_STYLE
262 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
263# else
264 __asm
265 {
266# ifdef RT_ARCH_AMD64
267 mov rax, [pGdtr]
268 sgdt [rax]
269# else
270 mov eax, [pGdtr]
271 sgdt [eax]
272# endif
273 }
274# endif
275}
276#endif
277
278
279/**
280 * Sets the content of the GDTR CPU register.
281 * @param pGdtr Where to load the GDTR contents from.
282 */
283#if RT_INLINE_ASM_EXTERNAL
284DECLASM(void) ASMSetGDTR(const RTGDTR *pGdtr);
285#else
286DECLINLINE(void) ASMSetGDTR(const RTGDTR *pGdtr)
287{
288# if RT_INLINE_ASM_GNU_STYLE
289 __asm__ __volatile__("lgdt %0" : : "m" (*pGdtr));
290# else
291 __asm
292 {
293# ifdef RT_ARCH_AMD64
294 mov rax, [pGdtr]
295 lgdt [rax]
296# else
297 mov eax, [pGdtr]
298 lgdt [eax]
299# endif
300 }
301# endif
302}
303#endif
304
305
306
307/**
308 * Get the CS register.
309 * @returns CS.
310 */
311#if RT_INLINE_ASM_EXTERNAL
312DECLASM(RTSEL) ASMGetCS(void);
313#else
314DECLINLINE(RTSEL) ASMGetCS(void)
315{
316 RTSEL SelCS;
317# if RT_INLINE_ASM_GNU_STYLE
318 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
319# else
320 __asm
321 {
322 mov ax, cs
323 mov [SelCS], ax
324 }
325# endif
326 return SelCS;
327}
328#endif
329
330
331/**
332 * Get the DS register.
333 * @returns DS.
334 */
335#if RT_INLINE_ASM_EXTERNAL
336DECLASM(RTSEL) ASMGetDS(void);
337#else
338DECLINLINE(RTSEL) ASMGetDS(void)
339{
340 RTSEL SelDS;
341# if RT_INLINE_ASM_GNU_STYLE
342 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
343# else
344 __asm
345 {
346 mov ax, ds
347 mov [SelDS], ax
348 }
349# endif
350 return SelDS;
351}
352#endif
353
354
355/**
356 * Get the ES register.
357 * @returns ES.
358 */
359#if RT_INLINE_ASM_EXTERNAL
360DECLASM(RTSEL) ASMGetES(void);
361#else
362DECLINLINE(RTSEL) ASMGetES(void)
363{
364 RTSEL SelES;
365# if RT_INLINE_ASM_GNU_STYLE
366 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
367# else
368 __asm
369 {
370 mov ax, es
371 mov [SelES], ax
372 }
373# endif
374 return SelES;
375}
376#endif
377
378
379/**
380 * Get the FS register.
381 * @returns FS.
382 */
383#if RT_INLINE_ASM_EXTERNAL
384DECLASM(RTSEL) ASMGetFS(void);
385#else
386DECLINLINE(RTSEL) ASMGetFS(void)
387{
388 RTSEL SelFS;
389# if RT_INLINE_ASM_GNU_STYLE
390 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
391# else
392 __asm
393 {
394 mov ax, fs
395 mov [SelFS], ax
396 }
397# endif
398 return SelFS;
399}
400#endif
401
402
403/**
404 * Get the GS register.
405 * @returns GS.
406 */
407#if RT_INLINE_ASM_EXTERNAL
408DECLASM(RTSEL) ASMGetGS(void);
409#else
410DECLINLINE(RTSEL) ASMGetGS(void)
411{
412 RTSEL SelGS;
413# if RT_INLINE_ASM_GNU_STYLE
414 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
415# else
416 __asm
417 {
418 mov ax, gs
419 mov [SelGS], ax
420 }
421# endif
422 return SelGS;
423}
424#endif
425
426
427/**
428 * Get the SS register.
429 * @returns SS.
430 */
431#if RT_INLINE_ASM_EXTERNAL
432DECLASM(RTSEL) ASMGetSS(void);
433#else
434DECLINLINE(RTSEL) ASMGetSS(void)
435{
436 RTSEL SelSS;
437# if RT_INLINE_ASM_GNU_STYLE
438 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
439# else
440 __asm
441 {
442 mov ax, ss
443 mov [SelSS], ax
444 }
445# endif
446 return SelSS;
447}
448#endif
449
450
451/**
452 * Get the TR register.
453 * @returns TR.
454 */
455#if RT_INLINE_ASM_EXTERNAL
456DECLASM(RTSEL) ASMGetTR(void);
457#else
458DECLINLINE(RTSEL) ASMGetTR(void)
459{
460 RTSEL SelTR;
461# if RT_INLINE_ASM_GNU_STYLE
462 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
463# else
464 __asm
465 {
466 str ax
467 mov [SelTR], ax
468 }
469# endif
470 return SelTR;
471}
472#endif
473
474
475/**
476 * Get the LDTR register.
477 * @returns LDTR.
478 */
479#if RT_INLINE_ASM_EXTERNAL
480DECLASM(RTSEL) ASMGetLDTR(void);
481#else
482DECLINLINE(RTSEL) ASMGetLDTR(void)
483{
484 RTSEL SelLDTR;
485# if RT_INLINE_ASM_GNU_STYLE
486 __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
487# else
488 __asm
489 {
490 sldt ax
491 mov [SelLDTR], ax
492 }
493# endif
494 return SelLDTR;
495}
496#endif
497
498
499/**
500 * Get the access rights for the segment selector.
501 *
502 * @returns The access rights on success or UINT32_MAX on failure.
503 * @param uSel The selector value.
504 *
505 * @remarks Using UINT32_MAX for failure is chosen because valid access rights
506 * always have bits 0:7 as 0 (on both Intel & AMD).
507 */
508#if RT_INLINE_ASM_EXTERNAL
509DECLASM(uint32_t) ASMGetSegAttr(uint32_t uSel);
510#else
511DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)
512{
513 uint32_t uAttr;
514 /* LAR only accesses 16 bits of the source operand, but eax for the
515 destination operand is required for getting the full 32-bit access rights. */
516# if RT_INLINE_ASM_GNU_STYLE
517 __asm__ __volatile__("lar %1, %%eax\n\t"
518 "jz done%=\n\t"
519 "movl $0xffffffff, %%eax\n\t"
520 "done%=:\n\t"
521 "movl %%eax, %0\n\t"
522 : "=r" (uAttr)
523 : "r" (uSel)
524 : "cc", "%eax");
525# else
526 __asm
527 {
528 lar eax, [uSel]
529 jz done
530 mov eax, 0ffffffffh
531 done:
532 mov [uAttr], eax
533 }
534# endif
535 return uAttr;
536}
537#endif
538
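/*
 * Usage sketch (illustrative, not part of the original file): pulling the DPL
 * out of the access rights of the current CS. That the DPL sits in bits 13:14
 * of the LAR result follows the Intel/AMD descriptor layout and is stated
 * here as background, not something this file defines.
 *
 * @code
 *      uint32_t const uAttr = ASMGetSegAttr(ASMGetCS());
 *      if (uAttr != UINT32_MAX)
 *      {
 *          uint8_t const uDpl = (uAttr >> 13) & 3; // descriptor privilege level
 *          ...
 *      }
 * @endcode
 */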
539
540/**
541 * Get the [RE]FLAGS register.
542 * @returns [RE]FLAGS.
543 */
544#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
545DECLASM(RTCCUINTREG) ASMGetFlags(void);
546#else
547DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
548{
549 RTCCUINTREG uFlags;
550# if RT_INLINE_ASM_GNU_STYLE
551# ifdef RT_ARCH_AMD64
552 __asm__ __volatile__("pushfq\n\t"
553 "popq %0\n\t"
554 : "=r" (uFlags));
555# else
556 __asm__ __volatile__("pushfl\n\t"
557 "popl %0\n\t"
558 : "=r" (uFlags));
559# endif
560# elif RT_INLINE_ASM_USES_INTRIN >= 15
561 uFlags = __readeflags();
562# else
563 __asm
564 {
565# ifdef RT_ARCH_AMD64
566 pushfq
567 pop [uFlags]
568# else
569 pushfd
570 pop [uFlags]
571# endif
572 }
573# endif
574 return uFlags;
575}
576#endif
577
578
579/**
580 * Set the [RE]FLAGS register.
581 * @param uFlags The new [RE]FLAGS value.
582 */
583#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
584DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
585#else
586DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
587{
588# if RT_INLINE_ASM_GNU_STYLE
589# ifdef RT_ARCH_AMD64
590 __asm__ __volatile__("pushq %0\n\t"
591 "popfq\n\t"
592 : : "g" (uFlags));
593# else
594 __asm__ __volatile__("pushl %0\n\t"
595 "popfl\n\t"
596 : : "g" (uFlags));
597# endif
598# elif RT_INLINE_ASM_USES_INTRIN >= 15
599 __writeeflags(uFlags);
600# else
601 __asm
602 {
603# ifdef RT_ARCH_AMD64
604 push [uFlags]
605 popfq
606# else
607 push [uFlags]
608 popfd
609# endif
610 }
611# endif
612}
613#endif
614
615
616/**
617 * Modifies the [RE]FLAGS register.
618 * @returns Original value.
619 * @param fAndEfl Flags to keep (applied first).
620 * @param fOrEfl Flags to be set.
621 */
622#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
623DECLASM(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl);
624#else
625DECLINLINE(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl)
626{
627 RTCCUINTREG fOldEfl;
628# if RT_INLINE_ASM_GNU_STYLE
629# ifdef RT_ARCH_AMD64
630 __asm__ __volatile__("pushfq\n\t"
631 "movq (%%rsp), %0\n\t"
632 "andq %0, %1\n\t"
633 "orq %3, %1\n\t"
634 "mov %1, (%%rsp)\n\t"
635 "popfq\n\t"
636 : "=&r" (fOldEfl),
637 "=r" (fAndEfl)
638 : "1" (fAndEfl),
639 "rn" (fOrEfl) );
640# else
641 __asm__ __volatile__("pushfl\n\t"
642 "movl (%%esp), %0\n\t"
643 "andl %1, (%%esp)\n\t"
644 "orl %2, (%%esp)\n\t"
645 "popfl\n\t"
646 : "=&r" (fOldEfl)
647 : "rn" (fAndEfl),
648 "rn" (fOrEfl) );
649# endif
650# elif RT_INLINE_ASM_USES_INTRIN >= 15
651 fOldEfl = __readeflags();
652 __writeeflags((fOldEfl & fAndEfl) | fOrEfl);
653# else
654 __asm
655 {
656# ifdef RT_ARCH_AMD64
657 mov rdx, [fAndEfl]
658 mov rcx, [fOrEfl]
659 pushfq
660 mov rax, [rsp]
661 and rdx, rax
662 or rdx, rcx
663 mov [rsp], rdx
664 popfq
665 mov [fOldEfl], rax
666# else
667 mov edx, [fAndEfl]
668 mov ecx, [fOrEfl]
669 pushfd
670 mov eax, [esp]
671 and edx, eax
672 or edx, ecx
673 mov [esp], edx
674 popfd
675 mov [fOldEfl], eax
676# endif
677 }
678# endif
679 return fOldEfl;
680}
681#endif
682
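/*
 * Usage sketch (illustrative, not part of the original file): clearing the
 * alignment-check flag for a stretch of code and restoring the original
 * flags afterwards. X86_EFL_AC is assumed to come from iprt/x86.h, which
 * this file does not include.
 *
 * @code
 *      RTCCUINTREG const fSavedEfl = ASMChangeFlags(~(RTCCUINTREG)X86_EFL_AC, 0);
 *      ...                         // runs with EFLAGS.AC cleared
 *      ASMSetFlags(fSavedEfl);
 * @endcode
 */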
683
684/**
685 * Modifies the [RE]FLAGS register by ORing in one or more flags.
686 * @returns Original value.
687 * @param fOrEfl The flags to be set (ORed in).
688 */
689#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
690DECLASM(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl);
691#else
692DECLINLINE(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl)
693{
694 RTCCUINTREG fOldEfl;
695# if RT_INLINE_ASM_GNU_STYLE
696# ifdef RT_ARCH_AMD64
697 __asm__ __volatile__("pushfq\n\t"
698 "movq (%%rsp), %0\n\t"
699 "orq %1, (%%rsp)\n\t"
700 "popfq\n\t"
701 : "=&r" (fOldEfl)
702 : "rn" (fOrEfl) );
703# else
704 __asm__ __volatile__("pushfl\n\t"
705 "movl (%%esp), %0\n\t"
706 "orl %1, (%%esp)\n\t"
707 "popfl\n\t"
708 : "=&r" (fOldEfl)
709 : "rn" (fOrEfl) );
710# endif
711# elif RT_INLINE_ASM_USES_INTRIN >= 15
712 fOldEfl = __readeflags();
713 __writeeflags(fOldEfl | fOrEfl);
714# else
715 __asm
716 {
717# ifdef RT_ARCH_AMD64
718 mov rcx, [fOrEfl]
719 pushfq
720 mov rdx, [rsp]
721 or [rsp], rcx
722 popfq
723 mov [fOldEfl], rdx
724# else
725 mov ecx, [fOrEfl]
726 pushfd
727 mov edx, [esp]
728 or [esp], ecx
729 popfd
730 mov [fOldEfl], edx
731# endif
732 }
733# endif
734 return fOldEfl;
735}
736#endif
737
738
739/**
740 * Modifies the [RE]FLAGS register by AND'ing out one or more flags.
741 * @returns Original value.
742 * @param fAndEfl The flags to keep.
743 */
744#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
745DECLASM(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl);
746#else
747DECLINLINE(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl)
748{
749 RTCCUINTREG fOldEfl;
750# if RT_INLINE_ASM_GNU_STYLE
751# ifdef RT_ARCH_AMD64
752 __asm__ __volatile__("pushfq\n\t"
753 "movq (%%rsp), %0\n\t"
754 "andq %1, (%%rsp)\n\t"
755 "popfq\n\t"
756 : "=&r" (fOldEfl)
757 : "rn" (fAndEfl) );
758# else
759 __asm__ __volatile__("pushfl\n\t"
760 "movl (%%esp), %0\n\t"
761 "andl %1, (%%esp)\n\t"
762 "popfl\n\t"
763 : "=&r" (fOldEfl)
764 : "rn" (fAndEfl) );
765# endif
766# elif RT_INLINE_ASM_USES_INTRIN >= 15
767 fOldEfl = __readeflags();
768 __writeeflags(fOldEfl & fAndEfl);
769# else
770 __asm
771 {
772# ifdef RT_ARCH_AMD64
773 mov rdx, [fAndEfl]
774 pushfq
775 mov rax, [rsp]
776 and [rsp], rdx
777 popfq
778 mov [fOldEfl], rax
779# else
780 mov edx, [fAndEfl]
781 pushfd
782 mov eax, [esp]
783 and [esp], edx
784 popfd
785 mov [fOldEfl], eax
786# endif
787 }
788# endif
789 return fOldEfl;
790}
791#endif
792
793
794/**
795 * Gets the content of the CPU timestamp counter register.
796 *
797 * @returns TSC.
798 */
799#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
800DECLASM(uint64_t) ASMReadTSC(void);
801#else
802DECLINLINE(uint64_t) ASMReadTSC(void)
803{
804 RTUINT64U u;
805# if RT_INLINE_ASM_GNU_STYLE
806 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
807# else
808# if RT_INLINE_ASM_USES_INTRIN
809 u.u = __rdtsc();
810# else
811 __asm
812 {
813 rdtsc
814 mov [u.s.Lo], eax
815 mov [u.s.Hi], edx
816 }
817# endif
818# endif
819 return u.u;
820}
821#endif
822
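/*
 * Usage sketch (illustrative, not part of the original file): a rough cycle
 * count around a piece of work. RDTSC is not a serializing instruction, so
 * without additional fencing this only gives an approximate figure.
 *
 * @code
 *      uint64_t const uTscStart = ASMReadTSC();
 *      ...                         // work being measured
 *      uint64_t const cTicks    = ASMReadTSC() - uTscStart;
 * @endcode
 */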
823
824/**
825 * Gets the content of the CPU timestamp counter register and the
826 * associated AUX value.
827 *
828 * @returns TSC.
829 * @param puAux Where to store the AUX value.
830 */
831#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
832DECLASM(uint64_t) ASMReadTscWithAux(uint32_t *puAux);
833#else
834DECLINLINE(uint64_t) ASMReadTscWithAux(uint32_t *puAux)
835{
836 RTUINT64U u;
837# if RT_INLINE_ASM_GNU_STYLE
838 /* rdtscp is not supported by ancient linux build VM of course :-( */
839 /*__asm__ __volatile__("rdtscp\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux)); */
840 __asm__ __volatile__(".byte 0x0f,0x01,0xf9\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux));
841# else
842# if RT_INLINE_ASM_USES_INTRIN >= 15
843 u.u = __rdtscp(puAux);
844# else
845 __asm
846 {
847 rdtscp
848 mov [u.s.Lo], eax
849 mov [u.s.Hi], edx
850 mov eax, [puAux]
851 mov [eax], ecx
852 }
853# endif
854# endif
855 return u.u;
856}
857#endif
858
859
860/**
861 * Performs the cpuid instruction returning all registers.
862 *
863 * @param uOperator CPUID operation (eax).
864 * @param pvEAX Where to store eax.
865 * @param pvEBX Where to store ebx.
866 * @param pvECX Where to store ecx.
867 * @param pvEDX Where to store edx.
868 * @remark We're using void pointers to ease the use of special bitfield structures and such.
869 */
870#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
871DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
872#else
873DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
874{
875# if RT_INLINE_ASM_GNU_STYLE
876# ifdef RT_ARCH_AMD64
877 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
878 __asm__ __volatile__ ("cpuid\n\t"
879 : "=a" (uRAX),
880 "=b" (uRBX),
881 "=c" (uRCX),
882 "=d" (uRDX)
883 : "0" (uOperator), "2" (0));
884 *(uint32_t *)pvEAX = (uint32_t)uRAX;
885 *(uint32_t *)pvEBX = (uint32_t)uRBX;
886 *(uint32_t *)pvECX = (uint32_t)uRCX;
887 *(uint32_t *)pvEDX = (uint32_t)uRDX;
888# else
889 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
890 "cpuid\n\t"
891 "xchgl %%ebx, %1\n\t"
892 : "=a" (*(uint32_t *)pvEAX),
893 "=r" (*(uint32_t *)pvEBX),
894 "=c" (*(uint32_t *)pvECX),
895 "=d" (*(uint32_t *)pvEDX)
896 : "0" (uOperator), "2" (0));
897# endif
898
899# elif RT_INLINE_ASM_USES_INTRIN
900 int aInfo[4];
901 __cpuid(aInfo, uOperator);
902 *(uint32_t *)pvEAX = aInfo[0];
903 *(uint32_t *)pvEBX = aInfo[1];
904 *(uint32_t *)pvECX = aInfo[2];
905 *(uint32_t *)pvEDX = aInfo[3];
906
907# else
908 uint32_t uEAX;
909 uint32_t uEBX;
910 uint32_t uECX;
911 uint32_t uEDX;
912 __asm
913 {
914 push ebx
915 mov eax, [uOperator]
916 cpuid
917 mov [uEAX], eax
918 mov [uEBX], ebx
919 mov [uECX], ecx
920 mov [uEDX], edx
921 pop ebx
922 }
923 *(uint32_t *)pvEAX = uEAX;
924 *(uint32_t *)pvEBX = uEBX;
925 *(uint32_t *)pvECX = uECX;
926 *(uint32_t *)pvEDX = uEDX;
927# endif
928}
929#endif
930
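/*
 * Usage sketch (illustrative, not part of the original file): querying
 * standard leaf 1 and testing the SSE2 bit. The bit position (EDX bit 26)
 * is from the Intel/AMD CPUID documentation, not from this file; RT_BOOL
 * and RT_BIT_32 come from iprt/cdefs.h.
 *
 * @code
 *      uint32_t uEAX, uEBX, uECX, uEDX;
 *      ASMCpuId(1, &uEAX, &uEBX, &uECX, &uEDX);
 *      bool const fSse2 = RT_BOOL(uEDX & RT_BIT_32(26));
 * @endcode
 */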
931
932/**
933 * Performs the CPUID instruction with EAX and ECX input returning ALL output
934 * registers.
935 *
936 * @param uOperator CPUID operation (eax).
937 * @param uIdxECX ecx index
938 * @param pvEAX Where to store eax.
939 * @param pvEBX Where to store ebx.
940 * @param pvECX Where to store ecx.
941 * @param pvEDX Where to store edx.
942 * @remark We're using void pointers to ease the use of special bitfield structures and such.
943 */
944#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
945DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
946#else
947DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
948{
949# if RT_INLINE_ASM_GNU_STYLE
950# ifdef RT_ARCH_AMD64
951 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
952 __asm__ ("cpuid\n\t"
953 : "=a" (uRAX),
954 "=b" (uRBX),
955 "=c" (uRCX),
956 "=d" (uRDX)
957 : "0" (uOperator),
958 "2" (uIdxECX));
959 *(uint32_t *)pvEAX = (uint32_t)uRAX;
960 *(uint32_t *)pvEBX = (uint32_t)uRBX;
961 *(uint32_t *)pvECX = (uint32_t)uRCX;
962 *(uint32_t *)pvEDX = (uint32_t)uRDX;
963# else
964 __asm__ ("xchgl %%ebx, %1\n\t"
965 "cpuid\n\t"
966 "xchgl %%ebx, %1\n\t"
967 : "=a" (*(uint32_t *)pvEAX),
968 "=r" (*(uint32_t *)pvEBX),
969 "=c" (*(uint32_t *)pvECX),
970 "=d" (*(uint32_t *)pvEDX)
971 : "0" (uOperator),
972 "2" (uIdxECX));
973# endif
974
975# elif RT_INLINE_ASM_USES_INTRIN
976 int aInfo[4];
977 __cpuidex(aInfo, uOperator, uIdxECX);
978 *(uint32_t *)pvEAX = aInfo[0];
979 *(uint32_t *)pvEBX = aInfo[1];
980 *(uint32_t *)pvECX = aInfo[2];
981 *(uint32_t *)pvEDX = aInfo[3];
982
983# else
984 uint32_t uEAX;
985 uint32_t uEBX;
986 uint32_t uECX;
987 uint32_t uEDX;
988 __asm
989 {
990 push ebx
991 mov eax, [uOperator]
992 mov ecx, [uIdxECX]
993 cpuid
994 mov [uEAX], eax
995 mov [uEBX], ebx
996 mov [uECX], ecx
997 mov [uEDX], edx
998 pop ebx
999 }
1000 *(uint32_t *)pvEAX = uEAX;
1001 *(uint32_t *)pvEBX = uEBX;
1002 *(uint32_t *)pvECX = uECX;
1003 *(uint32_t *)pvEDX = uEDX;
1004# endif
1005}
1006#endif
1007
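/*
 * Usage sketch (illustrative, not part of the original file): the structured
 * extended feature leaf 7 takes a sub-leaf index in ECX, which is what this
 * variant is for. The caller is expected to have verified that leaf 7 exists
 * (see ASMIsValidStdRange() further down).
 *
 * @code
 *      uint32_t uEAX, uEBX, uECX, uEDX;
 *      ASMCpuId_Idx_ECX(7, 0, &uEAX, &uEBX, &uECX, &uEDX); // sub-leaf 0
 * @endcode
 */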
1008
1009/**
1010 * CPUID variant that initializes all 4 registers before the CPUID instruction.
1011 *
1012 * @returns The EAX result value.
1013 * @param uOperator CPUID operation (eax).
1014 * @param uInitEBX The value to assign EBX prior to the CPUID instruction.
1015 * @param uInitECX The value to assign ECX prior to the CPUID instruction.
1016 * @param uInitEDX The value to assign EDX prior to the CPUID instruction.
1017 * @param pvEAX Where to store eax. Optional.
1018 * @param pvEBX Where to store ebx. Optional.
1019 * @param pvECX Where to store ecx. Optional.
1020 * @param pvEDX Where to store edx. Optional.
1021 */
1022DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator, uint32_t uInitEBX, uint32_t uInitECX, uint32_t uInitEDX,
1023 void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
1024
1025
1026/**
1027 * Performs the cpuid instruction returning ecx and edx.
1028 *
1029 * @param uOperator CPUID operation (eax).
1030 * @param pvECX Where to store ecx.
1031 * @param pvEDX Where to store edx.
1032 * @remark We're using void pointers to ease the use of special bitfield structures and such.
1033 */
1034#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1035DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
1036#else
1037DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
1038{
1039 uint32_t uEBX;
1040 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
1041}
1042#endif
1043
1044
1045/**
1046 * Performs the cpuid instruction returning eax.
1047 *
1048 * @param uOperator CPUID operation (eax).
1049 * @returns EAX after cpuid operation.
1050 */
1051#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1052DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
1053#else
1054DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
1055{
1056 RTCCUINTREG xAX;
1057# if RT_INLINE_ASM_GNU_STYLE
1058# ifdef RT_ARCH_AMD64
1059 __asm__ ("cpuid"
1060 : "=a" (xAX)
1061 : "0" (uOperator)
1062 : "rbx", "rcx", "rdx");
1063# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1064 __asm__ ("push %%ebx\n\t"
1065 "cpuid\n\t"
1066 "pop %%ebx\n\t"
1067 : "=a" (xAX)
1068 : "0" (uOperator)
1069 : "ecx", "edx");
1070# else
1071 __asm__ ("cpuid"
1072 : "=a" (xAX)
1073 : "0" (uOperator)
1074 : "edx", "ecx", "ebx");
1075# endif
1076
1077# elif RT_INLINE_ASM_USES_INTRIN
1078 int aInfo[4];
1079 __cpuid(aInfo, uOperator);
1080 xAX = aInfo[0];
1081
1082# else
1083 __asm
1084 {
1085 push ebx
1086 mov eax, [uOperator]
1087 cpuid
1088 mov [xAX], eax
1089 pop ebx
1090 }
1091# endif
1092 return (uint32_t)xAX;
1093}
1094#endif
1095
1096
1097/**
1098 * Performs the cpuid instruction returning ebx.
1099 *
1100 * @param uOperator CPUID operation (eax).
1101 * @returns EBX after cpuid operation.
1102 */
1103#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1104DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
1105#else
1106DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
1107{
1108 RTCCUINTREG xBX;
1109# if RT_INLINE_ASM_GNU_STYLE
1110# ifdef RT_ARCH_AMD64
1111 RTCCUINTREG uSpill;
1112 __asm__ ("cpuid"
1113 : "=a" (uSpill),
1114 "=b" (xBX)
1115 : "0" (uOperator)
1116 : "rdx", "rcx");
1117# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1118 __asm__ ("push %%ebx\n\t"
1119 "cpuid\n\t"
1120 "mov %%ebx, %%edx\n\t"
1121 "pop %%ebx\n\t"
1122 : "=a" (uOperator),
1123 "=d" (xBX)
1124 : "0" (uOperator)
1125 : "ecx");
1126# else
1127 __asm__ ("cpuid"
1128 : "=a" (uOperator),
1129 "=b" (xBX)
1130 : "0" (uOperator)
1131 : "edx", "ecx");
1132# endif
1133
1134# elif RT_INLINE_ASM_USES_INTRIN
1135 int aInfo[4];
1136 __cpuid(aInfo, uOperator);
1137 xBX = aInfo[1];
1138
1139# else
1140 __asm
1141 {
1142 push ebx
1143 mov eax, [uOperator]
1144 cpuid
1145 mov [xBX], ebx
1146 pop ebx
1147 }
1148# endif
1149 return (uint32_t)xBX;
1150}
1151#endif
1152
1153
1154/**
1155 * Performs the cpuid instruction returning ecx.
1156 *
1157 * @param uOperator CPUID operation (eax).
1158 * @returns ECX after cpuid operation.
1159 */
1160#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1161DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
1162#else
1163DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
1164{
1165 RTCCUINTREG xCX;
1166# if RT_INLINE_ASM_GNU_STYLE
1167# ifdef RT_ARCH_AMD64
1168 RTCCUINTREG uSpill;
1169 __asm__ ("cpuid"
1170 : "=a" (uSpill),
1171 "=c" (xCX)
1172 : "0" (uOperator)
1173 : "rbx", "rdx");
1174# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1175 __asm__ ("push %%ebx\n\t"
1176 "cpuid\n\t"
1177 "pop %%ebx\n\t"
1178 : "=a" (uOperator),
1179 "=c" (xCX)
1180 : "0" (uOperator)
1181 : "edx");
1182# else
1183 __asm__ ("cpuid"
1184 : "=a" (uOperator),
1185 "=c" (xCX)
1186 : "0" (uOperator)
1187 : "ebx", "edx");
1188
1189# endif
1190
1191# elif RT_INLINE_ASM_USES_INTRIN
1192 int aInfo[4];
1193 __cpuid(aInfo, uOperator);
1194 xCX = aInfo[2];
1195
1196# else
1197 __asm
1198 {
1199 push ebx
1200 mov eax, [uOperator]
1201 cpuid
1202 mov [xCX], ecx
1203 pop ebx
1204 }
1205# endif
1206 return (uint32_t)xCX;
1207}
1208#endif
1209
1210
1211/**
1212 * Performs the cpuid instruction returning edx.
1213 *
1214 * @param uOperator CPUID operation (eax).
1215 * @returns EDX after cpuid operation.
1216 */
1217#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1218DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
1219#else
1220DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
1221{
1222 RTCCUINTREG xDX;
1223# if RT_INLINE_ASM_GNU_STYLE
1224# ifdef RT_ARCH_AMD64
1225 RTCCUINTREG uSpill;
1226 __asm__ ("cpuid"
1227 : "=a" (uSpill),
1228 "=d" (xDX)
1229 : "0" (uOperator)
1230 : "rbx", "rcx");
1231# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1232 __asm__ ("push %%ebx\n\t"
1233 "cpuid\n\t"
1234 "pop %%ebx\n\t"
1235 : "=a" (uOperator),
1236 "=d" (xDX)
1237 : "0" (uOperator)
1238 : "ecx");
1239# else
1240 __asm__ ("cpuid"
1241 : "=a" (uOperator),
1242 "=d" (xDX)
1243 : "0" (uOperator)
1244 : "ebx", "ecx");
1245# endif
1246
1247# elif RT_INLINE_ASM_USES_INTRIN
1248 int aInfo[4];
1249 __cpuid(aInfo, uOperator);
1250 xDX = aInfo[3];
1251
1252# else
1253 __asm
1254 {
1255 push ebx
1256 mov eax, [uOperator]
1257 cpuid
1258 mov [xDX], edx
1259 pop ebx
1260 }
1261# endif
1262 return (uint32_t)xDX;
1263}
1264#endif
1265
1266
1267/**
1268 * Checks if the current CPU supports CPUID.
1269 *
1270 * @returns true if CPUID is supported.
1271 */
1272#ifdef __WATCOMC__
1273DECLASM(bool) ASMHasCpuId(void);
1274#else
1275DECLINLINE(bool) ASMHasCpuId(void)
1276{
1277# ifdef RT_ARCH_AMD64
1278 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
1279# else /* !RT_ARCH_AMD64 */
1280 bool fRet = false;
1281# if RT_INLINE_ASM_GNU_STYLE
1282 uint32_t u1;
1283 uint32_t u2;
1284 __asm__ ("pushf\n\t"
1285 "pop %1\n\t"
1286 "mov %1, %2\n\t"
1287 "xorl $0x200000, %1\n\t"
1288 "push %1\n\t"
1289 "popf\n\t"
1290 "pushf\n\t"
1291 "pop %1\n\t"
1292 "cmpl %1, %2\n\t"
1293 "setne %0\n\t"
1294 "push %2\n\t"
1295 "popf\n\t"
1296 : "=m" (fRet), "=r" (u1), "=r" (u2));
1297# else
1298 __asm
1299 {
1300 pushfd
1301 pop eax
1302 mov ebx, eax
1303 xor eax, 0200000h
1304 push eax
1305 popfd
1306 pushfd
1307 pop eax
1308 cmp eax, ebx
1309 setne fRet
1310 push ebx
1311 popfd
1312 }
1313# endif
1314 return fRet;
1315# endif /* !RT_ARCH_AMD64 */
1316}
1317#endif
1318
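/*
 * Usage sketch (illustrative, not part of the original file): on 32-bit
 * targets CPUID use should be guarded, since pre-586 class CPUs lack the
 * instruction; on AMD64 the check compiles down to 'true'.
 *
 * @code
 *      uint32_t uMaxLeaf = 0;
 *      if (ASMHasCpuId())
 *          uMaxLeaf = ASMCpuId_EAX(0);
 * @endcode
 */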
1319
1320/**
1321 * Gets the APIC ID of the current CPU.
1322 *
1323 * @returns the APIC ID.
1324 */
1325#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1326DECLASM(uint8_t) ASMGetApicId(void);
1327#else
1328DECLINLINE(uint8_t) ASMGetApicId(void)
1329{
1330 RTCCUINTREG xBX;
1331# if RT_INLINE_ASM_GNU_STYLE
1332# ifdef RT_ARCH_AMD64
1333 RTCCUINTREG uSpill;
1334 __asm__ __volatile__ ("cpuid"
1335 : "=a" (uSpill),
1336 "=b" (xBX)
1337 : "0" (1)
1338 : "rcx", "rdx");
1339# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1340 RTCCUINTREG uSpill;
1341 __asm__ __volatile__ ("mov %%ebx,%1\n\t"
1342 "cpuid\n\t"
1343 "xchgl %%ebx,%1\n\t"
1344 : "=a" (uSpill),
1345 "=rm" (xBX)
1346 : "0" (1)
1347 : "ecx", "edx");
1348# else
1349 RTCCUINTREG uSpill;
1350 __asm__ __volatile__ ("cpuid"
1351 : "=a" (uSpill),
1352 "=b" (xBX)
1353 : "0" (1)
1354 : "ecx", "edx");
1355# endif
1356
1357# elif RT_INLINE_ASM_USES_INTRIN
1358 int aInfo[4];
1359 __cpuid(aInfo, 1);
1360 xBX = aInfo[1];
1361
1362# else
1363 __asm
1364 {
1365 push ebx
1366 mov eax, 1
1367 cpuid
1368 mov [xBX], ebx
1369 pop ebx
1370 }
1371# endif
1372 return (uint8_t)(xBX >> 24);
1373}
1374#endif
1375
1376
1377/**
1378 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
1379 *
1380 * @returns true/false.
1381 * @param uEBX EBX return from ASMCpuId(0)
1382 * @param uECX ECX return from ASMCpuId(0)
1383 * @param uEDX EDX return from ASMCpuId(0)
1384 */
1385DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1386{
1387 return uEBX == UINT32_C(0x756e6547)
1388 && uECX == UINT32_C(0x6c65746e)
1389 && uEDX == UINT32_C(0x49656e69);
1390}
1391
1392
1393/**
1394 * Tests if this is a genuine Intel CPU.
1395 *
1396 * @returns true/false.
1397 * @remarks ASSUMES that cpuid is supported by the CPU.
1398 */
1399DECLINLINE(bool) ASMIsIntelCpu(void)
1400{
1401 uint32_t uEAX, uEBX, uECX, uEDX;
1402 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1403 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
1404}
1405
1406
1407/**
1408 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
1409 *
1410 * @returns true/false.
1411 * @param uEBX EBX return from ASMCpuId(0)
1412 * @param uECX ECX return from ASMCpuId(0)
1413 * @param uEDX EDX return from ASMCpuId(0)
1414 */
1415DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1416{
1417 return uEBX == UINT32_C(0x68747541)
1418 && uECX == UINT32_C(0x444d4163)
1419 && uEDX == UINT32_C(0x69746e65);
1420}
1421
1422
1423/**
1424 * Tests if this is an authentic AMD CPU.
1425 *
1426 * @returns true/false.
1427 * @remarks ASSUMES that cpuid is supported by the CPU.
1428 */
1429DECLINLINE(bool) ASMIsAmdCpu(void)
1430{
1431 uint32_t uEAX, uEBX, uECX, uEDX;
1432 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1433 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
1434}
1435
1436
1437/**
1438 * Tests if it is a centaur hauling VIA CPU based on the ASMCpuId(0) output.
1439 *
1440 * @returns true/false.
1441 * @param uEBX EBX return from ASMCpuId(0).
1442 * @param uECX ECX return from ASMCpuId(0).
1443 * @param uEDX EDX return from ASMCpuId(0).
1444 */
1445DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1446{
1447 return uEBX == UINT32_C(0x746e6543)
1448 && uECX == UINT32_C(0x736c7561)
1449 && uEDX == UINT32_C(0x48727561);
1450}
1451
1452
1453/**
1454 * Tests if this is a centaur hauling VIA CPU.
1455 *
1456 * @returns true/false.
1457 * @remarks ASSUMES that cpuid is supported by the CPU.
1458 */
1459DECLINLINE(bool) ASMIsViaCentaurCpu(void)
1460{
1461 uint32_t uEAX, uEBX, uECX, uEDX;
1462 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1463 return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
1464}
1465
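/*
 * Usage sketch (illustrative, not part of the original file): dispatching on
 * the CPU vendor with a single leaf 0 query instead of calling each of the
 * ASMIs*Cpu() wrappers, which would each redo the CPUID.
 *
 * @code
 *      uint32_t uEAX, uEBX, uECX, uEDX;
 *      ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
 *      if (ASMIsIntelCpuEx(uEBX, uECX, uEDX))              { ... }
 *      else if (ASMIsAmdCpuEx(uEBX, uECX, uEDX))           { ... }
 *      else if (ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX))    { ... }
 * @endcode
 */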
1466
1467/**
1468 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
1469 *
1471 * @returns true/false.
1472 * @param uEAX The EAX value of CPUID leaf 0x00000000.
1473 *
1474 * @note This only succeeds if there are at least two leaves in the range.
1475 * @remarks The upper range limit is just some half reasonable value we've
1476 * picked out of thin air.
1477 */
1478DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
1479{
1480 return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
1481}
1482
1483
1484/**
1485 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
1486 *
1489 * @returns true/false.
1490 * @param uEAX The EAX value of CPUID leaf 0x80000000.
1491 *
1492 * @note This only succeeds if there are at least two leaves in the range.
1493 * @remarks The upper range limit is just some half reasonable value we've
1494 * picked out of thin air.
1495 */
1496DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
1497{
1498 return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
1499}
1500
1501
1502/**
1503 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001)
1504 *
1505 * @returns Family.
1506 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
1507 */
1508DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
1509{
1510 return ((uEAX >> 8) & 0xf) == 0xf
1511 ? ((uEAX >> 20) & 0x7f) + 0xf
1512 : ((uEAX >> 8) & 0xf);
1513}
1514
1515
1516/**
1517 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
1518 *
1519 * @returns Model.
1520 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1521 */
1522DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
1523{
1524 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
1525 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1526 : ((uEAX >> 4) & 0xf);
1527}
1528
1529
1530/**
1531 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
1532 *
1533 * @returns Model.
1534 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1535 */
1536DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
1537{
1538 return ((uEAX >> 8) & 0xf) == 0xf
1539 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1540 : ((uEAX >> 4) & 0xf);
1541}
1542
1543
1544/**
1545 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001)
1546 *
1547 * @returns Model.
1548 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1549 * @param fIntel Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
1550 */
1551DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
1552{
1553 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
1554 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1555 : ((uEAX >> 4) & 0xf);
1556}
1557
1558
1559/**
1560 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001)
1561 *
1562 * @returns Stepping.
1563 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1564 */
1565DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
1566{
1567 return uEAX & 0xf;
1568}
1569
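/*
 * Worked example (illustrative, not part of the original file): for a leaf 1
 * EAX value of 0x000306A9 the extractors above give family 6 (bits 8-11),
 * model 0x3A (extended model 3 shifted up, OR'ed with base model 0xA, since
 * the family is 6 and this is an Intel value) and stepping 9 (bits 0-3).
 *
 * @code
 *      uint32_t const uEAX    = UINT32_C(0x000306A9);
 *      uint32_t const uFamily = ASMGetCpuFamily(uEAX);        // 6
 *      uint32_t const uModel  = ASMGetCpuModel(uEAX, true);   // 0x3a
 *      uint32_t const uStep   = ASMGetCpuStepping(uEAX);      // 9
 * @endcode
 */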
1570
1571/**
1572 * Get cr0.
1573 * @returns cr0.
1574 */
1575#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1576DECLASM(RTCCUINTXREG) ASMGetCR0(void);
1577#else
1578DECLINLINE(RTCCUINTXREG) ASMGetCR0(void)
1579{
1580 RTCCUINTXREG uCR0;
1581# if RT_INLINE_ASM_USES_INTRIN
1582 uCR0 = __readcr0();
1583
1584# elif RT_INLINE_ASM_GNU_STYLE
1585# ifdef RT_ARCH_AMD64
1586 __asm__ __volatile__("movq %%cr0, %0\t\n" : "=r" (uCR0));
1587# else
1588 __asm__ __volatile__("movl %%cr0, %0\t\n" : "=r" (uCR0));
1589# endif
1590# else
1591 __asm
1592 {
1593# ifdef RT_ARCH_AMD64
1594 mov rax, cr0
1595 mov [uCR0], rax
1596# else
1597 mov eax, cr0
1598 mov [uCR0], eax
1599# endif
1600 }
1601# endif
1602 return uCR0;
1603}
1604#endif
1605
1606
1607/**
1608 * Sets the CR0 register.
1609 * @param uCR0 The new CR0 value.
1610 */
1611#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1612DECLASM(void) ASMSetCR0(RTCCUINTXREG uCR0);
1613#else
1614DECLINLINE(void) ASMSetCR0(RTCCUINTXREG uCR0)
1615{
1616# if RT_INLINE_ASM_USES_INTRIN
1617 __writecr0(uCR0);
1618
1619# elif RT_INLINE_ASM_GNU_STYLE
1620# ifdef RT_ARCH_AMD64
1621 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
1622# else
1623 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
1624# endif
1625# else
1626 __asm
1627 {
1628# ifdef RT_ARCH_AMD64
1629 mov rax, [uCR0]
1630 mov cr0, rax
1631# else
1632 mov eax, [uCR0]
1633 mov cr0, eax
1634# endif
1635 }
1636# endif
1637}
1638#endif
1639
1640
1641/**
1642 * Get cr2.
1643 * @returns cr2.
1644 */
1645#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1646DECLASM(RTCCUINTXREG) ASMGetCR2(void);
1647#else
1648DECLINLINE(RTCCUINTXREG) ASMGetCR2(void)
1649{
1650 RTCCUINTXREG uCR2;
1651# if RT_INLINE_ASM_USES_INTRIN
1652 uCR2 = __readcr2();
1653
1654# elif RT_INLINE_ASM_GNU_STYLE
1655# ifdef RT_ARCH_AMD64
1656 __asm__ __volatile__("movq %%cr2, %0\t\n" : "=r" (uCR2));
1657# else
1658 __asm__ __volatile__("movl %%cr2, %0\t\n" : "=r" (uCR2));
1659# endif
1660# else
1661 __asm
1662 {
1663# ifdef RT_ARCH_AMD64
1664 mov rax, cr2
1665 mov [uCR2], rax
1666# else
1667 mov eax, cr2
1668 mov [uCR2], eax
1669# endif
1670 }
1671# endif
1672 return uCR2;
1673}
1674#endif
1675
1676
1677/**
1678 * Sets the CR2 register.
1679 * @param uCR2 The new CR2 value.
1680 */
1681#if RT_INLINE_ASM_EXTERNAL
1682DECLASM(void) ASMSetCR2(RTCCUINTXREG uCR2);
1683#else
1684DECLINLINE(void) ASMSetCR2(RTCCUINTXREG uCR2)
1685{
1686# if RT_INLINE_ASM_GNU_STYLE
1687# ifdef RT_ARCH_AMD64
1688 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
1689# else
1690 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
1691# endif
1692# else
1693 __asm
1694 {
1695# ifdef RT_ARCH_AMD64
1696 mov rax, [uCR2]
1697 mov cr2, rax
1698# else
1699 mov eax, [uCR2]
1700 mov cr2, eax
1701# endif
1702 }
1703# endif
1704}
1705#endif
1706
1707
1708/**
1709 * Get cr3.
1710 * @returns cr3.
1711 */
1712#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1713DECLASM(RTCCUINTXREG) ASMGetCR3(void);
1714#else
1715DECLINLINE(RTCCUINTXREG) ASMGetCR3(void)
1716{
1717 RTCCUINTXREG uCR3;
1718# if RT_INLINE_ASM_USES_INTRIN
1719 uCR3 = __readcr3();
1720
1721# elif RT_INLINE_ASM_GNU_STYLE
1722# ifdef RT_ARCH_AMD64
1723 __asm__ __volatile__("movq %%cr3, %0\t\n" : "=r" (uCR3));
1724# else
1725 __asm__ __volatile__("movl %%cr3, %0\t\n" : "=r" (uCR3));
1726# endif
1727# else
1728 __asm
1729 {
1730# ifdef RT_ARCH_AMD64
1731 mov rax, cr3
1732 mov [uCR3], rax
1733# else
1734 mov eax, cr3
1735 mov [uCR3], eax
1736# endif
1737 }
1738# endif
1739 return uCR3;
1740}
1741#endif
1742
1743
1744/**
1745 * Sets the CR3 register.
1746 *
1747 * @param uCR3 New CR3 value.
1748 */
1749#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1750DECLASM(void) ASMSetCR3(RTCCUINTXREG uCR3);
1751#else
1752DECLINLINE(void) ASMSetCR3(RTCCUINTXREG uCR3)
1753{
1754# if RT_INLINE_ASM_USES_INTRIN
1755 __writecr3(uCR3);
1756
1757# elif RT_INLINE_ASM_GNU_STYLE
1758# ifdef RT_ARCH_AMD64
1759 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
1760# else
1761 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
1762# endif
1763# else
1764 __asm
1765 {
1766# ifdef RT_ARCH_AMD64
1767 mov rax, [uCR3]
1768 mov cr3, rax
1769# else
1770 mov eax, [uCR3]
1771 mov cr3, eax
1772# endif
1773 }
1774# endif
1775}
1776#endif
1777
1778
1779/**
1780 * Reloads the CR3 register.
1781 */
1782#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1783DECLASM(void) ASMReloadCR3(void);
1784#else
1785DECLINLINE(void) ASMReloadCR3(void)
1786{
1787# if RT_INLINE_ASM_USES_INTRIN
1788 __writecr3(__readcr3());
1789
1790# elif RT_INLINE_ASM_GNU_STYLE
1791 RTCCUINTXREG u;
1792# ifdef RT_ARCH_AMD64
1793 __asm__ __volatile__("movq %%cr3, %0\n\t"
1794 "movq %0, %%cr3\n\t"
1795 : "=r" (u));
1796# else
1797 __asm__ __volatile__("movl %%cr3, %0\n\t"
1798 "movl %0, %%cr3\n\t"
1799 : "=r" (u));
1800# endif
1801# else
1802 __asm
1803 {
1804# ifdef RT_ARCH_AMD64
1805 mov rax, cr3
1806 mov cr3, rax
1807# else
1808 mov eax, cr3
1809 mov cr3, eax
1810# endif
1811 }
1812# endif
1813}
1814#endif
1815
1816
1817/**
1818 * Get cr4.
1819 * @returns cr4.
1820 */
1821#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1822DECLASM(RTCCUINTXREG) ASMGetCR4(void);
1823#else
1824DECLINLINE(RTCCUINTXREG) ASMGetCR4(void)
1825{
1826 RTCCUINTXREG uCR4;
1827# if RT_INLINE_ASM_USES_INTRIN
1828 uCR4 = __readcr4();
1829
1830# elif RT_INLINE_ASM_GNU_STYLE
1831# ifdef RT_ARCH_AMD64
1832 __asm__ __volatile__("movq %%cr4, %0\t\n" : "=r" (uCR4));
1833# else
1834 __asm__ __volatile__("movl %%cr4, %0\t\n" : "=r" (uCR4));
1835# endif
1836# else
1837 __asm
1838 {
1839# ifdef RT_ARCH_AMD64
1840 mov rax, cr4
1841 mov [uCR4], rax
1842# else
1843 push eax /* just in case */
1844 /*mov eax, cr4*/
1845 _emit 0x0f
1846 _emit 0x20
1847 _emit 0xe0
1848 mov [uCR4], eax
1849 pop eax
1850# endif
1851 }
1852# endif
1853 return uCR4;
1854}
1855#endif
1856
1857
1858/**
1859 * Sets the CR4 register.
1860 *
1861 * @param uCR4 New CR4 value.
1862 */
1863#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1864DECLASM(void) ASMSetCR4(RTCCUINTXREG uCR4);
1865#else
1866DECLINLINE(void) ASMSetCR4(RTCCUINTXREG uCR4)
1867{
1868# if RT_INLINE_ASM_USES_INTRIN
1869 __writecr4(uCR4);
1870
1871# elif RT_INLINE_ASM_GNU_STYLE
1872# ifdef RT_ARCH_AMD64
1873 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
1874# else
1875 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
1876# endif
1877# else
1878 __asm
1879 {
1880# ifdef RT_ARCH_AMD64
1881 mov rax, [uCR4]
1882 mov cr4, rax
1883# else
1884 mov eax, [uCR4]
1885 _emit 0x0F
1886 _emit 0x22
1887 _emit 0xE0 /* mov cr4, eax */
1888# endif
1889 }
1890# endif
1891}
1892#endif
1893
1894
1895/**
1896 * Get cr8.
1897 * @returns cr8.
1898 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1899 */
1900#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1901DECLASM(RTCCUINTXREG) ASMGetCR8(void);
1902#else
1903DECLINLINE(RTCCUINTXREG) ASMGetCR8(void)
1904{
1905# ifdef RT_ARCH_AMD64
1906 RTCCUINTXREG uCR8;
1907# if RT_INLINE_ASM_USES_INTRIN
1908 uCR8 = __readcr8();
1909
1910# elif RT_INLINE_ASM_GNU_STYLE
1911 __asm__ __volatile__("movq %%cr8, %0\t\n" : "=r" (uCR8));
1912# else
1913 __asm
1914 {
1915 mov rax, cr8
1916 mov [uCR8], rax
1917 }
1918# endif
1919 return uCR8;
1920# else /* !RT_ARCH_AMD64 */
1921 return 0;
1922# endif /* !RT_ARCH_AMD64 */
1923}
1924#endif
1925
1926
1927/**
1928 * Get XCR0 (eXtended feature Control Register 0).
1929 * @returns xcr0.
1930 */
1931DECLASM(uint64_t) ASMGetXcr0(void);
1932
1933/**
1934 * Sets the XCR0 register.
1935 * @param uXcr0 The new XCR0 value.
1936 */
1937DECLASM(void) ASMSetXcr0(uint64_t uXcr0);
1938
1939struct X86XSAVEAREA;
1940/**
1941 * Save extended CPU state.
1942 * @param pXStateArea Where to save the state.
1943 * @param fComponents Which state components to save.
1944 */
1945DECLASM(void) ASMXSave(struct X86XSAVEAREA *pXStateArea, uint64_t fComponents);
1946
1947/**
1948 * Loads extended CPU state.
1949 * @param pXStateArea Where to load the state from.
1950 * @param fComponents Which state components to load.
1951 */
1952DECLASM(void) ASMXRstor(struct X86XSAVEAREA const *pXStateArea, uint64_t fComponents);
1953
1954
1955/**
1956 * Enables interrupts (EFLAGS.IF).
1957 */
1958#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1959DECLASM(void) ASMIntEnable(void);
1960#else
1961DECLINLINE(void) ASMIntEnable(void)
1962{
1963# if RT_INLINE_ASM_GNU_STYLE
1964 __asm("sti\n");
1965# elif RT_INLINE_ASM_USES_INTRIN
1966 _enable();
1967# else
1968 __asm sti
1969# endif
1970}
1971#endif
1972
1973
1974/**
1975 * Disables interrupts (!EFLAGS.IF).
1976 */
1977#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1978DECLASM(void) ASMIntDisable(void);
1979#else
1980DECLINLINE(void) ASMIntDisable(void)
1981{
1982# if RT_INLINE_ASM_GNU_STYLE
1983 __asm("cli\n");
1984# elif RT_INLINE_ASM_USES_INTRIN
1985 _disable();
1986# else
1987 __asm cli
1988# endif
1989}
1990#endif
1991
1992
1993/**
1994 * Disables interrupts and returns previous xFLAGS.
1995 */
1996#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1997DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
1998#else
1999DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
2000{
2001 RTCCUINTREG xFlags;
2002# if RT_INLINE_ASM_GNU_STYLE
2003# ifdef RT_ARCH_AMD64
2004 __asm__ __volatile__("pushfq\n\t"
2005 "cli\n\t"
2006 "popq %0\n\t"
2007 : "=r" (xFlags));
2008# else
2009 __asm__ __volatile__("pushfl\n\t"
2010 "cli\n\t"
2011 "popl %0\n\t"
2012 : "=r" (xFlags));
2013# endif
2014# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
2015 xFlags = ASMGetFlags();
2016 _disable();
2017# else
2018 __asm {
2019 pushfd
2020 cli
2021 pop [xFlags]
2022 }
2023# endif
2024 return xFlags;
2025}
2026#endif
2027
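/*
 * Usage sketch (illustrative, not part of the original file): the canonical
 * pattern for a short interrupt-free section that restores the previous
 * interrupt state instead of unconditionally re-enabling interrupts.
 *
 * @code
 *      RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
 *      ...                         // brief critical work
 *      ASMSetFlags(fSavedFlags);   // restores EFLAGS.IF as it was
 * @endcode
 */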
2028
2029/**
2030 * Are interrupts enabled?
2031 *
2032 * @returns true / false.
2033 */
2034DECLINLINE(bool) ASMIntAreEnabled(void)
2035{
2036 RTCCUINTREG uFlags = ASMGetFlags();
2037 return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
2038}
2039
2040
2041/**
2042 * Halts the CPU until interrupted.
2043 */
2044#if RT_INLINE_ASM_EXTERNAL
2045DECLASM(void) ASMHalt(void);
2046#else
2047DECLINLINE(void) ASMHalt(void)
2048{
2049# if RT_INLINE_ASM_GNU_STYLE
2050 __asm__ __volatile__("hlt\n\t");
2051# else
2052 __asm {
2053 hlt
2054 }
2055# endif
2056}
2057#endif
2058
2059
2060/**
2061 * Reads a machine specific register.
2062 *
2063 * @returns Register content.
2064 * @param uRegister Register to read.
2065 */
2066#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2067DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
2068#else
2069DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
2070{
2071 RTUINT64U u;
2072# if RT_INLINE_ASM_GNU_STYLE
2073 __asm__ __volatile__("rdmsr\n\t"
2074 : "=a" (u.s.Lo),
2075 "=d" (u.s.Hi)
2076 : "c" (uRegister));
2077
2078# elif RT_INLINE_ASM_USES_INTRIN
2079 u.u = __readmsr(uRegister);
2080
2081# else
2082 __asm
2083 {
2084 mov ecx, [uRegister]
2085 rdmsr
2086 mov [u.s.Lo], eax
2087 mov [u.s.Hi], edx
2088 }
2089# endif
2090
2091 return u.u;
2092}
2093#endif
2094
2095
2096/**
2097 * Writes a machine specific register.
2098 *
2100 * @param uRegister Register to write to.
2101 * @param u64Val Value to write.
2102 */
2103#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2104DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
2105#else
2106DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
2107{
2108 RTUINT64U u;
2109
2110 u.u = u64Val;
2111# if RT_INLINE_ASM_GNU_STYLE
2112 __asm__ __volatile__("wrmsr\n\t"
2113 ::"a" (u.s.Lo),
2114 "d" (u.s.Hi),
2115 "c" (uRegister));
2116
2117# elif RT_INLINE_ASM_USES_INTRIN
2118 __writemsr(uRegister, u.u);
2119
2120# else
2121 __asm
2122 {
2123 mov ecx, [uRegister]
2124 mov edx, [u.s.Hi]
2125 mov eax, [u.s.Lo]
2126 wrmsr
2127 }
2128# endif
2129}
2130#endif
2131
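/*
 * Usage sketch (illustrative, not part of the original file): a
 * read-modify-write of an MSR. The register number 0x1a0 (IA32_MISC_ENABLE)
 * and the bit used are given purely as an example; RT_BIT_64 comes from
 * iprt/cdefs.h.
 *
 * @code
 *      uint64_t uVal = ASMRdMsr(UINT32_C(0x1a0));
 *      uVal |= RT_BIT_64(0);       // example bit only
 *      ASMWrMsr(UINT32_C(0x1a0), uVal);
 * @endcode
 */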
2132
2133/**
2134 * Reads a machine specific register, extended version (for AMD).
2135 *
2136 * @returns Register content.
2137 * @param uRegister Register to read.
2138 * @param uXDI RDI/EDI value.
2139 */
2140#if RT_INLINE_ASM_EXTERNAL
2141DECLASM(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI);
2142#else
2143DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI)
2144{
2145 RTUINT64U u;
2146# if RT_INLINE_ASM_GNU_STYLE
2147 __asm__ __volatile__("rdmsr\n\t"
2148 : "=a" (u.s.Lo),
2149 "=d" (u.s.Hi)
2150 : "c" (uRegister),
2151 "D" (uXDI));
2152
2153# else
2154 __asm
2155 {
2156 mov ecx, [uRegister]
2157 xchg edi, [uXDI]
2158 rdmsr
2159 mov [u.s.Lo], eax
2160 mov [u.s.Hi], edx
2161 xchg edi, [uXDI]
2162 }
2163# endif
2164
2165 return u.u;
2166}
2167#endif
2168
2169
2170/**
2171 * Writes a machine specific register, extended version (for AMD).
2172 *
2174 * @param uRegister Register to write to.
2175 * @param uXDI RDI/EDI value.
2176 * @param u64Val Value to write.
2177 */
2178#if RT_INLINE_ASM_EXTERNAL
2179DECLASM(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val);
2180#else
2181DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val)
2182{
2183 RTUINT64U u;
2184
2185 u.u = u64Val;
2186# if RT_INLINE_ASM_GNU_STYLE
2187 __asm__ __volatile__("wrmsr\n\t"
2188 ::"a" (u.s.Lo),
2189 "d" (u.s.Hi),
2190 "c" (uRegister),
2191 "D" (uXDI));
2192
2193# else
2194 __asm
2195 {
2196 mov ecx, [uRegister]
2197 xchg edi, [uXDI]
2198 mov edx, [u.s.Hi]
2199 mov eax, [u.s.Lo]
2200 wrmsr
2201 xchg edi, [uXDI]
2202 }
2203# endif
2204}
2205#endif
2206
2207
2208
2209/**
2210 * Reads low part of a machine specific register.
2211 *
2212 * @returns Register content.
2213 * @param uRegister Register to read.
2214 */
2215#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2216DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
2217#else
2218DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
2219{
2220 uint32_t u32;
2221# if RT_INLINE_ASM_GNU_STYLE
2222 __asm__ __volatile__("rdmsr\n\t"
2223 : "=a" (u32)
2224 : "c" (uRegister)
2225 : "edx");
2226
2227# elif RT_INLINE_ASM_USES_INTRIN
2228 u32 = (uint32_t)__readmsr(uRegister);
2229
2230# else
2231 __asm
2232 {
2233 mov ecx, [uRegister]
2234 rdmsr
2235 mov [u32], eax
2236 }
2237# endif
2238
2239 return u32;
2240}
2241#endif
2242
2243
2244/**
2245 * Reads high part of a machine specific register.
2246 *
2247 * @returns Register content.
2248 * @param uRegister Register to read.
2249 */
2250#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2251DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
2252#else
2253DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
2254{
2255 uint32_t u32;
2256# if RT_INLINE_ASM_GNU_STYLE
2257 __asm__ __volatile__("rdmsr\n\t"
2258 : "=d" (u32)
2259 : "c" (uRegister)
2260 : "eax");
2261
2262# elif RT_INLINE_ASM_USES_INTRIN
2263 u32 = (uint32_t)(__readmsr(uRegister) >> 32);
2264
2265# else
2266 __asm
2267 {
2268 mov ecx, [uRegister]
2269 rdmsr
2270 mov [u32], edx
2271 }
2272# endif
2273
2274 return u32;
2275}
2276#endif
2277
2278
2279/**
2280 * Gets dr0.
2281 *
2282 * @returns dr0.
2283 */
2284#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2285DECLASM(RTCCUINTXREG) ASMGetDR0(void);
2286#else
2287DECLINLINE(RTCCUINTXREG) ASMGetDR0(void)
2288{
2289 RTCCUINTXREG uDR0;
2290# if RT_INLINE_ASM_USES_INTRIN
2291 uDR0 = __readdr(0);
2292# elif RT_INLINE_ASM_GNU_STYLE
2293# ifdef RT_ARCH_AMD64
2294 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
2295# else
2296 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
2297# endif
2298# else
2299 __asm
2300 {
2301# ifdef RT_ARCH_AMD64
2302 mov rax, dr0
2303 mov [uDR0], rax
2304# else
2305 mov eax, dr0
2306 mov [uDR0], eax
2307# endif
2308 }
2309# endif
2310 return uDR0;
2311}
2312#endif
2313
2314
2315/**
2316 * Gets dr1.
2317 *
2318 * @returns dr1.
2319 */
2320#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2321DECLASM(RTCCUINTXREG) ASMGetDR1(void);
2322#else
2323DECLINLINE(RTCCUINTXREG) ASMGetDR1(void)
2324{
2325 RTCCUINTXREG uDR1;
2326# if RT_INLINE_ASM_USES_INTRIN
2327 uDR1 = __readdr(1);
2328# elif RT_INLINE_ASM_GNU_STYLE
2329# ifdef RT_ARCH_AMD64
2330 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
2331# else
2332 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
2333# endif
2334# else
2335 __asm
2336 {
2337# ifdef RT_ARCH_AMD64
2338 mov rax, dr1
2339 mov [uDR1], rax
2340# else
2341 mov eax, dr1
2342 mov [uDR1], eax
2343# endif
2344 }
2345# endif
2346 return uDR1;
2347}
2348#endif
2349
2350
2351/**
2352 * Gets dr2.
2353 *
2354 * @returns dr2.
2355 */
2356#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2357DECLASM(RTCCUINTXREG) ASMGetDR2(void);
2358#else
2359DECLINLINE(RTCCUINTXREG) ASMGetDR2(void)
2360{
2361 RTCCUINTXREG uDR2;
2362# if RT_INLINE_ASM_USES_INTRIN
2363 uDR2 = __readdr(2);
2364# elif RT_INLINE_ASM_GNU_STYLE
2365# ifdef RT_ARCH_AMD64
2366 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
2367# else
2368 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
2369# endif
2370# else
2371 __asm
2372 {
2373# ifdef RT_ARCH_AMD64
2374 mov rax, dr2
2375 mov [uDR2], rax
2376# else
2377 mov eax, dr2
2378 mov [uDR2], eax
2379# endif
2380 }
2381# endif
2382 return uDR2;
2383}
2384#endif
2385
2386
2387/**
2388 * Gets dr3.
2389 *
2390 * @returns dr3.
2391 */
2392#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2393DECLASM(RTCCUINTXREG) ASMGetDR3(void);
2394#else
2395DECLINLINE(RTCCUINTXREG) ASMGetDR3(void)
2396{
2397 RTCCUINTXREG uDR3;
2398# if RT_INLINE_ASM_USES_INTRIN
2399 uDR3 = __readdr(3);
2400# elif RT_INLINE_ASM_GNU_STYLE
2401# ifdef RT_ARCH_AMD64
2402 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
2403# else
2404 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
2405# endif
2406# else
2407 __asm
2408 {
2409# ifdef RT_ARCH_AMD64
2410 mov rax, dr3
2411 mov [uDR3], rax
2412# else
2413 mov eax, dr3
2414 mov [uDR3], eax
2415# endif
2416 }
2417# endif
2418 return uDR3;
2419}
2420#endif
2421
2422
2423/**
2424 * Gets dr6.
2425 *
2426 * @returns dr6.
2427 */
2428#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2429DECLASM(RTCCUINTXREG) ASMGetDR6(void);
2430#else
2431DECLINLINE(RTCCUINTXREG) ASMGetDR6(void)
2432{
2433 RTCCUINTXREG uDR6;
2434# if RT_INLINE_ASM_USES_INTRIN
2435 uDR6 = __readdr(6);
2436# elif RT_INLINE_ASM_GNU_STYLE
2437# ifdef RT_ARCH_AMD64
2438 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
2439# else
2440 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
2441# endif
2442# else
2443 __asm
2444 {
2445# ifdef RT_ARCH_AMD64
2446 mov rax, dr6
2447 mov [uDR6], rax
2448# else
2449 mov eax, dr6
2450 mov [uDR6], eax
2451# endif
2452 }
2453# endif
2454 return uDR6;
2455}
2456#endif
2457
2458
2459/**
2460 * Reads and clears DR6.
2461 *
2462 * @returns DR6.
2463 */
2464#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2465DECLASM(RTCCUINTXREG) ASMGetAndClearDR6(void);
2466#else
2467DECLINLINE(RTCCUINTXREG) ASMGetAndClearDR6(void)
2468{
2469 RTCCUINTXREG uDR6;
2470# if RT_INLINE_ASM_USES_INTRIN
2471 uDR6 = __readdr(6);
2472 __writedr(6, 0xffff0ff0U); /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
2473# elif RT_INLINE_ASM_GNU_STYLE
2474 RTCCUINTXREG uNewValue = 0xffff0ff0U; /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
2475# ifdef RT_ARCH_AMD64
2476 __asm__ __volatile__("movq %%dr6, %0\n\t"
2477 "movq %1, %%dr6\n\t"
2478 : "=r" (uDR6)
2479 : "r" (uNewValue));
2480# else
2481 __asm__ __volatile__("movl %%dr6, %0\n\t"
2482 "movl %1, %%dr6\n\t"
2483 : "=r" (uDR6)
2484 : "r" (uNewValue));
2485# endif
2486# else
2487 __asm
2488 {
2489# ifdef RT_ARCH_AMD64
2490 mov rax, dr6
2491 mov [uDR6], rax
2492 mov rcx, rax
2493 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 and 63-31 are zero. */
2494 mov dr6, rcx
2495# else
2496 mov eax, dr6
2497 mov [uDR6], eax
2498 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 is zero. */
2499 mov dr6, ecx
2500# endif
2501 }
2502# endif
2503 return uDR6;
2504}
2505#endif
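
/* Usage sketch (illustrative, not part of the API): a #DB handler will
   typically want to snapshot and reset the status register in one go, so
   that stale B0-B3/BD/BS/BT bits do not leak into the next exception.  The
   raw bit test below stands in for the X86_DR6_* constants of iprt/x86.h,
   which this header does not include:
   @code
       RTCCUINTXREG const uDR6 = ASMGetAndClearDR6();
       if (uDR6 & RT_BIT_32(0)) // B0: breakpoint 0 was hit
       {
           // ... handle the hit on the address armed in DR0 ...
       }
   @endcode */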


/**
 * Gets dr7.
 *
 * @returns dr7.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTXREG) ASMGetDR7(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR7(void)
{
    RTCCUINTXREG uDR7;
# if RT_INLINE_ASM_USES_INTRIN
    uDR7 = __readdr(7);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
#  else
    __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr7
        mov     [uDR7], rax
#  else
        mov     eax, dr7
        mov     [uDR7], eax
#  endif
    }
# endif
    return uDR7;
}
#endif


/**
 * Sets dr0.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR0(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR0(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(0, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr0, rax
#  else
        mov     eax, [uDRVal]
        mov     dr0, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr1.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR1(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR1(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(1, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr1, rax
#  else
        mov     eax, [uDRVal]
        mov     dr1, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr2.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR2(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR2(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(2, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr2, rax
#  else
        mov     eax, [uDRVal]
        mov     dr2, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr3.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR3(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR3(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(3, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr3, rax
#  else
        mov     eax, [uDRVal]
        mov     dr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr6.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR6(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR6(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(6, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr6, rax
#  else
        mov     eax, [uDRVal]
        mov     dr6, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr7.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR7(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR7(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(7, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr7, rax
#  else
        mov     eax, [uDRVal]
        mov     dr7, eax
#  endif
    }
# endif
}
#endif
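
/* Usage sketch combining the DR accessors above to arm a hardware execute
   breakpoint (raw DR7 bit values are spelled out instead of the X86_DR7_*
   constants from iprt/x86.h; pvCode is a hypothetical code address):
   @code
       ASMSetDR0((RTCCUINTXREG)(uintptr_t)pvCode); // linear address to trap
       ASMSetDR6(0xffff0ff0);                      // reset stale status bits
       ASMSetDR7(ASMGetDR7() | RT_BIT_32(0));      // L0=1: enable local bp 0;
                                                   // R/W0=00 and LEN0=00 are
                                                   // required for execute bps
   @endcode */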


/**
 * Writes an 8-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u8      8-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
#else
DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outb %b1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u8));

# elif RT_INLINE_ASM_USES_INTRIN
    __outbyte(Port, u8);

# else
    __asm
    {
        mov     dx, [Port]
        mov     al, [u8]
        out     dx, al
    }
# endif
}
#endif
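
/* Usage sketch: the classic consumer of this one is POST/debug-code output;
   port 0x80 is the traditional diagnostic port on PC systems and is used
   here purely as an illustration:
   @code
       ASMOutU8(0x80, 0xAB);
   @endcode */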


/**
 * Reads an 8-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 8-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
#else
DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
{
    uint8_t u8;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inb %w1, %b0\n\t"
                         : "=a" (u8)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u8 = __inbyte(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      al, dx
        mov     [u8], al
    }
# endif
    return u8;
}
#endif
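
/* Usage sketch (hypothetical device access, for illustration only): poll the
   legacy keyboard controller status port until its input buffer drains:
   @code
       while (ASMInU8(0x64) & RT_BIT_32(1)) // IBF: input buffer full
           ; // spin
   @endcode */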


/**
 * Writes a 16-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u16     16-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
#else
DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outw %w1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u16));

# elif RT_INLINE_ASM_USES_INTRIN
    __outword(Port, u16);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ax, [u16]
        out     dx, ax
    }
# endif
}
#endif


/**
 * Reads a 16-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 16-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
#else
DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
{
    uint16_t u16;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inw %w1, %w0\n\t"
                         : "=a" (u16)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u16 = __inword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      ax, dx
        mov     [u16], ax
    }
# endif
    return u16;
}
#endif


/**
 * Writes a 32-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u32     32-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
#else
DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outl %1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u32));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdword(Port, u32);

# else
    __asm
    {
        mov     dx, [Port]
        mov     eax, [u32]
        out     dx, eax
    }
# endif
}
#endif


/**
 * Reads a 32-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 32-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
#else
DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inl %w1, %0\n\t"
                         : "=a" (u32)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = __indword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      eax, dx
        mov     [u32], eax
    }
# endif
    return u32;
}
#endif
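
/* Usage sketch: PCI configuration mechanism #1 pairs ASMOutU32 and ASMInU32
   on ports 0xcf8/0xcfc (the bus/device/function numbers below are made up):
   @code
       uint32_t const uAddr = RT_BIT_32(31) // enable bit
                            | (0 << 16)     // bus 0
                            | (2 << 11)     // device 2
                            | (0 << 8)      // function 0
                            | 0x00;         // register 0: vendor/device ID
       ASMOutU32(0xcf8, uAddr);
       uint32_t const uVenDevId = ASMInU32(0xcfc);
   @endcode */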


/**
 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau8    Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
#else
DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsb\n\t"
                         : "+S" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    esi, eax
        rep outsb
        xchg    esi, eax
    }
# endif
}
#endif
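
/* Usage sketch: a single REP OUTSB beats a loop over ASMOutU8 for bulk
   transfers; port 0xe9 (an emulator debug console convention, assumed here)
   serves as the example sink:
   @code
       static uint8_t const s_abMsg[] = { 'i', 'p', 'r', 't' };
       ASMOutStrU8(0xe9, s_abMsg, sizeof(s_abMsg));
   @endcode */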


/**
 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau8    Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
#else
DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insb\n\t"
                         : "+D" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inbytestring(Port, pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    edi, eax
        rep insb
        xchg    edi, eax
    }
# endif
}
#endif


/**
 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau16   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
#else
DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsw\n\t"
                         : "+S" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    esi, eax
        rep outsw
        xchg    esi, eax
    }
# endif
}
#endif


/**
 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau16   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
#else
DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insw\n\t"
                         : "+D" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inwordstring(Port, pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    edi, eax
        rep insw
        xchg    edi, eax
    }
# endif
}
#endif
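
/* Usage sketch: the textbook use is ATA PIO data transfer, which reads one
   512-byte sector as 256 words from the primary channel data port 0x1f0
   (standard values, shown only as an example):
   @code
       uint16_t au16Sector[256];
       ASMInStrU16(0x1f0, au16Sector, RT_ELEMENTS(au16Sector));
   @endcode */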


/**
 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau32   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
#else
DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsl\n\t"
                         : "+S" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    esi, eax
        rep outsd
        xchg    esi, eax
    }
# endif
}
#endif


/**
 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau32   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
#else
DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insl\n\t"
                         : "+D" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    edi, eax
        rep insd
        xchg    edi, eax
    }
# endif
}
#endif


/**
 * Invalidates the TLB entry for a page (INVLPG).
 *
 * @param   pv      Address of the page to invalidate.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInvalidatePage(void *pv);
#else
DECLINLINE(void) ASMInvalidatePage(void *pv)
{
# if RT_INLINE_ASM_USES_INTRIN
    __invlpg(pv);

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invlpg %0\n\t"
                         : : "m" (*(uint8_t *)pv));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pv]
        invlpg  [rax]
#  else
        mov     eax, [pv]
        invlpg  [eax]
#  endif
    }
# endif
}
#endif
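
/* Usage sketch: after editing a page table entry, flush the stale TLB entry
   for just that linear address instead of reloading CR3 (the PTE write and
   pvPage are hypothetical):
   @code
       pPte->u = uNewPteValue;   // hypothetical PTE update
       ASMInvalidatePage(pvPage);
   @endcode */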


/**
 * Writes back the internal caches and invalidates them (WBINVD).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
#else
DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __wbinvd();

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wbinvd");
# else
    __asm
    {
        wbinvd
    }
# endif
}
#endif
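
/* Usage sketch (simplified MTRR-style update; assumes the caller has already
   disabled interrupts and uses the raw CR0.CD bit instead of a named
   constant from iprt/x86.h):
   @code
       RTCCUINTXREG const uCr0 = ASMGetCR0();
       ASMSetCR0(uCr0 | RT_BIT_32(30));    // CR0.CD: disable caching
       ASMWriteBackAndInvalidateCaches();
       // ... reprogram the memory types here ...
       ASMSetCR0(uCr0);
   @endcode */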


/**
 * Invalidates internal and (perhaps) external caches without first flushing
 * dirty cache lines (INVD). Use with extreme care.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMInvalidateInternalCaches(void);
#else
DECLINLINE(void) ASMInvalidateInternalCaches(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invd");
# else
    __asm
    {
        invd
    }
# endif
}
#endif


/**
 * Memory load/store fence, waits for any pending writes and reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
 */
DECLINLINE(void) ASMMemoryFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t"); /* mfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_mfence();
#else
    __asm
    {
        _emit   0x0f    /* mfence, hand-assembled for older compilers */
        _emit   0xae
        _emit   0xf0
    }
#endif
}


/**
 * Memory store fence, waits for any writes to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit to be set.
 */
DECLINLINE(void) ASMWriteFenceSSE(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t"); /* sfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_sfence();
#else
    __asm
    {
        _emit   0x0f    /* sfence, hand-assembled for older compilers */
        _emit   0xae
        _emit   0xf8
    }
#endif
}
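
/* Usage sketch: producer side of a buffer handoff where the payload (e.g. in
   write-combining memory) must be globally visible before the ready flag;
   pShared and its members are hypothetical:
   @code
       pShared->au32Data[0] = u32Payload;
       ASMWriteFenceSSE();     // order the payload before the flag
       pShared->fReady = 1;
   @endcode */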


/**
 * Memory load fence, waits for any pending reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
 */
DECLINLINE(void) ASMReadFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t"); /* lfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_lfence();
#else
    __asm
    {
        _emit   0x0f    /* lfence, hand-assembled for older compilers */
        _emit   0xae
        _emit   0xe8
    }
#endif
}

#if !defined(_MSC_VER) || !defined(RT_ARCH_AMD64)

/**
 * Clears the AC bit in the EFLAGS register.
 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit to be set.
 * Must be executed in ring-0 (R0).
 */
DECLINLINE(void) ASMClearAC(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0x01,0xca\n\t"); /* clac */
#else
    __asm
    {
        _emit   0x0f    /* clac, hand-assembled for older compilers */
        _emit   0x01
        _emit   0xca
    }
#endif
}


/**
 * Sets the AC bit in the EFLAGS register.
 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit to be set.
 * Must be executed in ring-0 (R0).
 */
DECLINLINE(void) ASMSetAC(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0x01,0xcb\n\t"); /* stac */
#else
    __asm
    {
        _emit   0x0f    /* stac, hand-assembled for older compilers */
        _emit   0x01
        _emit   0xcb
    }
#endif
}
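
/* Usage sketch: on SMAP-capable CPUs, ring-0 code brackets deliberate user
   memory accesses with STAC/CLAC; the pointer and value are hypothetical:
   @code
       ASMSetAC();                    // permit user-mode accesses (STAC)
       uValue = *puUserModePtr;       // deliberate touch of user memory
       ASMClearAC();                  // forbid them again (CLAC)
   @endcode */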

#endif /* !_MSC_VER || !RT_ARCH_AMD64 */

/** @} */
#endif
