source: vbox/trunk/src/VBox/Runtime/testcase/tstInlineAsm.cpp@3672

Last change on this file since 3672 was 3672, checked in by vboxsync, 17 years ago

RT_OS_* and RT_ARCH_* for Runtime/ and Support/

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 30.9 KB
/* $Id: tstInlineAsm.cpp 3672 2007-07-17 12:39:30Z vboxsync $ */
/** @file
 * innotek Portable Runtime Testcase - inline assembly.
 */

/*
 * Copyright (C) 2006-2007 innotek GmbH
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License as published by the Free Software Foundation,
 * in version 2 as it comes in the "COPYING" file of the VirtualBox OSE
 * distribution. VirtualBox OSE is distributed in the hope that it will
 * be useful, but WITHOUT ANY WARRANTY of any kind.
 *
 * If you received this file as part of a commercial VirtualBox
 * distribution, then only the terms of your commercial VirtualBox
 * license agreement apply instead of the previous paragraph.
 */

/*******************************************************************************
*   Header Files                                                               *
*******************************************************************************/
#include <iprt/asm.h>
#include <iprt/stream.h>
#include <iprt/string.h>
#include <iprt/runtime.h>
#include <iprt/param.h>


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** Global error count. */
static unsigned g_cErrors;


/*******************************************************************************
*   Defined Constants And Macros                                               *
*******************************************************************************/
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            g_cErrors++; \
            RTPrintf("%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)

#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            g_cErrors++; \
            RTPrintf("%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)


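/* Usage sketch (taken from the atomic-exchange tests further down, not part of
 * the original file): CHECKOP evaluates the operation once, compares the result
 * against the expected value using the supplied printf format and type, and
 * bumps g_cErrors on mismatch; CHECKVAL does the same for a plain variable:
 *
 *     CHECKOP(ASMAtomicXchgU8(&s.u8, 1), 0, "%#x", uint8_t);
 *     CHECKVAL(s.u8, 1, "%#x");
 */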
#if !defined(PIC) || !defined(RT_ARCH_X86)
const char *getCacheAss(unsigned u)
{
    if (u == 0)
        return "res0  ";
    if (u == 1)
        return "direct";
    if (u >= 256)
        return "???";

    char *pszRet;
    RTStrAPrintf(&pszRet, "%d way", u); /* intentional leak! */
    return pszRet;
}


const char *getL2CacheAss(unsigned u)
{
    switch (u)
    {
        case 0:  return "off   ";
        case 1:  return "direct";
        case 2:  return "2 way ";
        case 3:  return "res3  ";
        case 4:  return "4 way ";
        case 5:  return "res5  ";
        case 6:  return "8 way ";
        case 7:  return "res7  ";
        case 8:  return "16 way";
        case 9:  return "res9  ";
        case 10: return "res10 ";
        case 11: return "res11 ";
        case 12: return "res12 ";
        case 13: return "res13 ";
        case 14: return "res14 ";
        case 15: return "fully ";
        default:
            return "????";
    }
}


/**
 * Test and dump all possible info from the CPUID instruction.
 *
 * @remark  Bits shared with the libc cpuid.c program. This was all written by me, so no worries.
 * @todo    Transform the dumping into a generic runtime function. We'll need it for logging!
 */
void tstASMCpuId(void)
{
    unsigned iBit;
    struct
    {
        uint32_t uEBX, uEAX, uEDX, uECX;
    } s;
    if (!ASMHasCpuId())
    {
        RTPrintf("tstInlineAsm: warning! CPU doesn't support CPUID\n");
        return;
    }

    /*
     * Try the 0 function and use that for checking the ASMCpuId_* variants.
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);

    uint32_t u32 = ASMCpuId_ECX(0);
    CHECKVAL(u32, s.uECX, "%x");

    u32 = ASMCpuId_EDX(0);
    CHECKVAL(u32, s.uEDX, "%x");

    uint32_t uECX2 = s.uECX - 1;
    uint32_t uEDX2 = s.uEDX - 1;
    ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);

    CHECKVAL(uECX2, s.uECX, "%x");
    CHECKVAL(uEDX2, s.uEDX, "%x");

    /*
     * Done testing, dump the information.
     */
    RTPrintf("tstInlineAsm: CPUID Dump\n");
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    const uint32_t cFunctions = s.uEAX;

    /* raw dump */
    RTPrintf("\n"
             " RAW Standard CPUIDs\n"
             "Function eax      ebx      ecx      edx\n");
    for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
    {
        ASMCpuId(iStd, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("%08x %08x %08x %08x %08x%s\n",
                 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTPrintf("Name: %.04s%.04s%.04s\n"
             "Support: 0-%u\n",
             &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);

    /*
     * Get Features.
     */
    if (cFunctions >= 1)
    {
        ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Family: %d \tExtended: %d \tEffective: %d\n"
                 "Model: %d \tExtended: %d \tEffective: %d\n"
                 "Stepping: %d\n"
                 "APIC ID: %#04x\n"
                 "Logical CPUs: %d\n"
                 "CLFLUSH Size: %d\n"
                 "Brand ID: %#04x\n",
                 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ((s.uEAX >> 8) & 0xf) + (((s.uEAX >> 8) & 0xf) == 0xf ? (s.uEAX >> 20) & 0x7f : 0),
                 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ((s.uEAX >> 4) & 0xf) | (((s.uEAX >> 4) & 0xf) == 0xf ? (s.uEAX >> 16) & 0x0f : 0),
                 (s.uEAX >> 0) & 0xf,
                 (s.uEBX >> 24) & 0xff,
                 (s.uEBX >> 16) & 0xff,
                 (s.uEBX >> 8) & 0xff,
                 (s.uEBX >> 0) & 0xff);

        RTPrintf("Features EDX: ");
        if (s.uEDX & BIT(0)) RTPrintf(" FPU");
        if (s.uEDX & BIT(1)) RTPrintf(" VME");
        if (s.uEDX & BIT(2)) RTPrintf(" DE");
        if (s.uEDX & BIT(3)) RTPrintf(" PSE");
        if (s.uEDX & BIT(4)) RTPrintf(" TSC");
        if (s.uEDX & BIT(5)) RTPrintf(" MSR");
        if (s.uEDX & BIT(6)) RTPrintf(" PAE");
        if (s.uEDX & BIT(7)) RTPrintf(" MCE");
        if (s.uEDX & BIT(8)) RTPrintf(" CX8");
        if (s.uEDX & BIT(9)) RTPrintf(" APIC");
        if (s.uEDX & BIT(10)) RTPrintf(" 10");
        if (s.uEDX & BIT(11)) RTPrintf(" SEP");
        if (s.uEDX & BIT(12)) RTPrintf(" MTRR");
        if (s.uEDX & BIT(13)) RTPrintf(" PGE");
        if (s.uEDX & BIT(14)) RTPrintf(" MCA");
        if (s.uEDX & BIT(15)) RTPrintf(" CMOV");
        if (s.uEDX & BIT(16)) RTPrintf(" PAT");
        if (s.uEDX & BIT(17)) RTPrintf(" PSE36");
        if (s.uEDX & BIT(18)) RTPrintf(" PSN");
        if (s.uEDX & BIT(19)) RTPrintf(" CLFSH");
        if (s.uEDX & BIT(20)) RTPrintf(" 20");
        if (s.uEDX & BIT(21)) RTPrintf(" DS");
        if (s.uEDX & BIT(22)) RTPrintf(" ACPI");
        if (s.uEDX & BIT(23)) RTPrintf(" MMX");
        if (s.uEDX & BIT(24)) RTPrintf(" FXSR");
        if (s.uEDX & BIT(25)) RTPrintf(" SSE");
        if (s.uEDX & BIT(26)) RTPrintf(" SSE2");
        if (s.uEDX & BIT(27)) RTPrintf(" SS");
        if (s.uEDX & BIT(28)) RTPrintf(" HTT");
        if (s.uEDX & BIT(29)) RTPrintf(" 29");
        if (s.uEDX & BIT(30)) RTPrintf(" 30");
        if (s.uEDX & BIT(31)) RTPrintf(" 31");
        RTPrintf("\n");

        /** @todo check intel docs. */
        RTPrintf("Features ECX: ");
        if (s.uECX & BIT(0)) RTPrintf(" SSE3");
        for (iBit = 1; iBit < 13; iBit++)
            if (s.uECX & BIT(iBit))
                RTPrintf(" %d", iBit);
        if (s.uECX & BIT(13)) RTPrintf(" CX16");
        for (iBit = 14; iBit < 32; iBit++)
            if (s.uECX & BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    /*
     * Extended.
     * Implemented after AMD specs.
     */
    /** @todo check out the intel specs. */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
    {
        RTPrintf("No extended CPUID info? Check the manual on how to detect this...\n");
        return;
    }
    const uint32_t cExtFunctions = s.uEAX | 0x80000000;

    /* raw dump */
    RTPrintf("\n"
             " RAW Extended CPUIDs\n"
             "Function eax      ebx      ecx      edx\n");
    for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
    {
        ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("%08x %08x %08x %08x %08x%s\n",
                 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTPrintf("Ext Name: %.4s%.4s%.4s\n"
             "Ext Supports: 0x80000000-%#010x\n",
             &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);

    if (cExtFunctions >= 0x80000001)
    {
        ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Family: %d \tExtended: %d \tEffective: %d\n"
                 "Model: %d \tExtended: %d \tEffective: %d\n"
                 "Stepping: %d\n"
                 "Brand ID: %#05x\n",
                 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ((s.uEAX >> 8) & 0xf) + (((s.uEAX >> 8) & 0xf) == 0xf ? (s.uEAX >> 20) & 0x7f : 0),
                 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ((s.uEAX >> 4) & 0xf) | (((s.uEAX >> 4) & 0xf) == 0xf ? (s.uEAX >> 16) & 0x0f : 0),
                 (s.uEAX >> 0) & 0xf,
                 s.uEBX & 0xfff);

        RTPrintf("Features EDX: ");
        if (s.uEDX & BIT(0)) RTPrintf(" FPU");
        if (s.uEDX & BIT(1)) RTPrintf(" VME");
        if (s.uEDX & BIT(2)) RTPrintf(" DE");
        if (s.uEDX & BIT(3)) RTPrintf(" PSE");
        if (s.uEDX & BIT(4)) RTPrintf(" TSC");
        if (s.uEDX & BIT(5)) RTPrintf(" MSR");
        if (s.uEDX & BIT(6)) RTPrintf(" PAE");
        if (s.uEDX & BIT(7)) RTPrintf(" MCE");
        if (s.uEDX & BIT(8)) RTPrintf(" CMPXCHG8B");
        if (s.uEDX & BIT(9)) RTPrintf(" APIC");
        if (s.uEDX & BIT(10)) RTPrintf(" 10");
        if (s.uEDX & BIT(11)) RTPrintf(" SysCallSysRet");
        if (s.uEDX & BIT(12)) RTPrintf(" MTRR");
        if (s.uEDX & BIT(13)) RTPrintf(" PGE");
        if (s.uEDX & BIT(14)) RTPrintf(" MCA");
        if (s.uEDX & BIT(15)) RTPrintf(" CMOV");
        if (s.uEDX & BIT(16)) RTPrintf(" PAT");
        if (s.uEDX & BIT(17)) RTPrintf(" PSE36");
        if (s.uEDX & BIT(18)) RTPrintf(" 18");
        if (s.uEDX & BIT(19)) RTPrintf(" 19");
        if (s.uEDX & BIT(20)) RTPrintf(" NX");
        if (s.uEDX & BIT(21)) RTPrintf(" 21");
        if (s.uEDX & BIT(22)) RTPrintf(" MmxExt");
        if (s.uEDX & BIT(23)) RTPrintf(" MMX");
        if (s.uEDX & BIT(24)) RTPrintf(" FXSR");
        if (s.uEDX & BIT(25)) RTPrintf(" FastFXSR");
        if (s.uEDX & BIT(26)) RTPrintf(" 26");
        if (s.uEDX & BIT(27)) RTPrintf(" RDTSCP");
        if (s.uEDX & BIT(28)) RTPrintf(" 28");
        if (s.uEDX & BIT(29)) RTPrintf(" LongMode");
        if (s.uEDX & BIT(30)) RTPrintf(" 3DNowExt");
        if (s.uEDX & BIT(31)) RTPrintf(" 3DNow");
        RTPrintf("\n");

        RTPrintf("Features ECX: ");
        if (s.uECX & BIT(0)) RTPrintf(" LahfSahf");
        if (s.uECX & BIT(1)) RTPrintf(" CmpLegacy");
        if (s.uECX & BIT(2)) RTPrintf(" SVM");
        if (s.uECX & BIT(3)) RTPrintf(" 3");
        if (s.uECX & BIT(4)) RTPrintf(" AltMovCr8");
        for (iBit = 5; iBit < 32; iBit++)
            if (s.uECX & BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    char szString[4*4*3+1] = {0};
    if (cExtFunctions >= 0x80000002)
        ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
    if (cExtFunctions >= 0x80000003)
        ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
    if (cExtFunctions >= 0x80000004)
        ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
    if (cExtFunctions >= 0x80000002)
        RTPrintf("Full Name: %s\n", szString);

    if (cExtFunctions >= 0x80000005)
    {
        ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("TLB 2/4M Instr/Uni: %s %3d entries\n"
                 "TLB 2/4M Data: %s %3d entries\n",
                 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
                 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
        RTPrintf("TLB 4K Instr/Uni: %s %3d entries\n"
                 "TLB 4K Data: %s %3d entries\n",
                 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
                 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
        RTPrintf("L1 Instr Cache Line Size: %d bytes\n"
                 "L1 Instr Cache Lines Per Tag: %d\n"
                 "L1 Instr Cache Associativity: %s\n"
                 "L1 Instr Cache Size: %d KB\n",
                 (s.uEDX >> 0) & 0xff,
                 (s.uEDX >> 8) & 0xff,
                 getCacheAss((s.uEDX >> 16) & 0xff),
                 (s.uEDX >> 24) & 0xff);
        RTPrintf("L1 Data Cache Line Size: %d bytes\n"
                 "L1 Data Cache Lines Per Tag: %d\n"
                 "L1 Data Cache Associativity: %s\n"
                 "L1 Data Cache Size: %d KB\n",
                 (s.uECX >> 0) & 0xff,
                 (s.uECX >> 8) & 0xff,
                 getCacheAss((s.uECX >> 16) & 0xff),
                 (s.uECX >> 24) & 0xff);
    }

    if (cExtFunctions >= 0x80000006)
    {
        ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
                 "L2 TLB 2/4M Data: %s %4d entries\n",
                 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
                 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
        RTPrintf("L2 TLB 4K Instr/Uni: %s %4d entries\n"
                 "L2 TLB 4K Data: %s %4d entries\n",
                 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
                 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
        RTPrintf("L2 Cache Line Size: %d bytes\n"
                 "L2 Cache Lines Per Tag: %d\n"
                 "L2 Cache Associativity: %s\n"
                 "L2 Cache Size: %d KB\n",
                 (s.uEDX >> 0) & 0xff,
                 (s.uEDX >> 8) & 0xf,
                 getL2CacheAss((s.uEDX >> 12) & 0xf),
                 (s.uEDX >> 16) & 0xffff);
    }

    if (cExtFunctions >= 0x80000007)
    {
        ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("APM Features: ");
        if (s.uEDX & BIT(0)) RTPrintf(" TS");
        if (s.uEDX & BIT(1)) RTPrintf(" FID");
        if (s.uEDX & BIT(2)) RTPrintf(" VID");
        if (s.uEDX & BIT(3)) RTPrintf(" TTP");
        if (s.uEDX & BIT(4)) RTPrintf(" TM");
        if (s.uEDX & BIT(5)) RTPrintf(" STC");
        if (s.uEDX & BIT(6)) RTPrintf(" 6");
        if (s.uEDX & BIT(7)) RTPrintf(" 7");
        if (s.uEDX & BIT(8)) RTPrintf(" TscInvariant");
        for (iBit = 9; iBit < 32; iBit++)
            if (s.uEDX & BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    if (cExtFunctions >= 0x80000008)
    {
        ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Physical Address Width: %d bits\n"
                 "Virtual Address Width: %d bits\n",
                 (s.uEAX >> 0) & 0xff,
                 (s.uEAX >> 8) & 0xff);
        RTPrintf("Physical Core Count: %d\n",
                 ((s.uECX >> 0) & 0xff) + 1);
        if ((s.uECX >> 12) & 0xf)
            RTPrintf("ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
    }

    if (cExtFunctions >= 0x8000000a)
    {
        ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("SVM Revision: %d (%#x)\n"
                 "Number of Address Space IDs: %d (%#x)\n",
                 s.uEAX & 0xff, s.uEAX & 0xff,
                 s.uEBX, s.uEBX);
    }
}
#endif /* !PIC || !X86 */


static void tstASMAtomicXchgU8(void)
{
    struct
    {
        uint8_t u8Dummy0;
        uint8_t u8;
        uint8_t u8Dummy1;
    } s;

    s.u8 = 0;
    s.u8Dummy0 = s.u8Dummy1 = 0x42;
    CHECKOP(ASMAtomicXchgU8(&s.u8, 1), 0, "%#x", uint8_t);
    CHECKVAL(s.u8, 1, "%#x");

    CHECKOP(ASMAtomicXchgU8(&s.u8, 0), 1, "%#x", uint8_t);
    CHECKVAL(s.u8, 0, "%#x");

    CHECKOP(ASMAtomicXchgU8(&s.u8, 0xff), 0, "%#x", uint8_t);
    CHECKVAL(s.u8, 0xff, "%#x");

    CHECKOP(ASMAtomicXchgU8(&s.u8, 0x87), 0xff, "%#x", uint8_t);
    CHECKVAL(s.u8, 0x87, "%#x");
    CHECKVAL(s.u8Dummy0, 0x42, "%#x");
    CHECKVAL(s.u8Dummy1, 0x42, "%#x");
}


static void tstASMAtomicXchgU16(void)
{
    struct
    {
        uint16_t u16Dummy0;
        uint16_t u16;
        uint16_t u16Dummy1;
    } s;

    s.u16 = 0;
    s.u16Dummy0 = s.u16Dummy1 = 0x1234;
    CHECKOP(ASMAtomicXchgU16(&s.u16, 1), 0, "%#x", uint16_t);
    CHECKVAL(s.u16, 1, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0), 1, "%#x", uint16_t);
    CHECKVAL(s.u16, 0, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0xffff), 0, "%#x", uint16_t);
    CHECKVAL(s.u16, 0xffff, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0x8765), 0xffff, "%#x", uint16_t);
    CHECKVAL(s.u16, 0x8765, "%#x");
    CHECKVAL(s.u16Dummy0, 0x1234, "%#x");
    CHECKVAL(s.u16Dummy1, 0x1234, "%#x");
}


static void tstASMAtomicXchgU32(void)
{
    struct
    {
        uint32_t u32Dummy0;
        uint32_t u32;
        uint32_t u32Dummy1;
    } s;

    s.u32 = 0;
    s.u32Dummy0 = s.u32Dummy1 = 0x11223344;

    CHECKOP(ASMAtomicXchgU32(&s.u32, 1), 0, "%#x", uint32_t);
    CHECKVAL(s.u32, 1, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, 0), 1, "%#x", uint32_t);
    CHECKVAL(s.u32, 0, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, ~0U), 0, "%#x", uint32_t);
    CHECKVAL(s.u32, ~0U, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, 0x87654321), ~0U, "%#x", uint32_t);
    CHECKVAL(s.u32, 0x87654321, "%#x");

    CHECKVAL(s.u32Dummy0, 0x11223344, "%#x");
    CHECKVAL(s.u32Dummy1, 0x11223344, "%#x");
}


static void tstASMAtomicXchgU64(void)
{
    struct
    {
        uint64_t u64Dummy0;
        uint64_t u64;
        uint64_t u64Dummy1;
    } s;

    s.u64 = 0;
    s.u64Dummy0 = s.u64Dummy1 = 0x1122334455667788ULL;

    CHECKOP(ASMAtomicXchgU64(&s.u64, 1), 0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 1ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, 0), 1ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 0ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, ~0ULL), 0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, ~0ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, 0xfedcba0987654321ULL), ~0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 0xfedcba0987654321ULL, "%#llx");

    CHECKVAL(s.u64Dummy0, 0x1122334455667788ULL, "%#llx");
    CHECKVAL(s.u64Dummy1, 0x1122334455667788ULL, "%#llx");
}


#ifdef RT_ARCH_AMD64
static void tstASMAtomicXchgU128(void)
{
    struct
    {
        RTUINT128U u128Dummy0;
        RTUINT128U u128;
        RTUINT128U u128Dummy1;
    } s;
    RTUINT128U u128Ret;
    RTUINT128U u128Arg;


    s.u128Dummy0.s.Lo = s.u128Dummy0.s.Hi = 0x1122334455667788;
    s.u128.s.Lo = 0;
    s.u128.s.Hi = 0;
    s.u128Dummy1 = s.u128Dummy0;

    u128Arg.s.Lo = 1;
    u128Arg.s.Hi = 0;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, 0ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, 1ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, 0ULL, "%#llx");

    u128Arg.s.Lo = 0;
    u128Arg.s.Hi = 0;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, 1ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, 0ULL, "%#llx");

    u128Arg.s.Lo = ~0ULL;
    u128Arg.s.Hi = ~0ULL;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, 0ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, ~0ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, ~0ULL, "%#llx");


    u128Arg.s.Lo = 0xfedcba0987654321ULL;
    u128Arg.s.Hi = 0x8897a6b5c4d3e2f1ULL;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, ~0ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, ~0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, 0xfedcba0987654321ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, 0x8897a6b5c4d3e2f1ULL, "%#llx");

    CHECKVAL(s.u128Dummy0.s.Lo, 0x1122334455667788, "%#llx");
    CHECKVAL(s.u128Dummy0.s.Hi, 0x1122334455667788, "%#llx");
    CHECKVAL(s.u128Dummy1.s.Lo, 0x1122334455667788, "%#llx");
    CHECKVAL(s.u128Dummy1.s.Hi, 0x1122334455667788, "%#llx");
}
#endif


static void tstASMAtomicXchgPtr(void)
{
    void *pv = NULL;

    CHECKOP(ASMAtomicXchgPtr(&pv, (void *)(~(uintptr_t)0)), NULL, "%p", void *);
    CHECKVAL(pv, (void *)(~(uintptr_t)0), "%p");

    CHECKOP(ASMAtomicXchgPtr(&pv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *);
    CHECKVAL(pv, (void *)0x87654321, "%p");

    CHECKOP(ASMAtomicXchgPtr(&pv, NULL), (void *)0x87654321, "%p", void *);
    CHECKVAL(pv, NULL, "%p");
}


static void tstASMAtomicCmpXchgU32(void)
{
    uint32_t u32 = 0xffffffff;

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0), false, "%d", bool);
    CHECKVAL(u32, 0xffffffff, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0xffffffff), true, "%d", bool);
    CHECKVAL(u32, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0xffffffff), false, "%d", bool);
    CHECKVAL(u32, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0), true, "%d", bool);
    CHECKVAL(u32, 0x8008efd, "%x");
}


static void tstASMAtomicCmpXchgU64(void)
{
    uint64_t u64 = 0xffffffffffffffULL;

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0), false, "%d", bool);
    CHECKVAL(u64, 0xffffffffffffffULL, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0xffffffffffffffULL), true, "%d", bool);
    CHECKVAL(u64, 0, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff), false, "%d", bool);
    CHECKVAL(u64, 0, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL), false, "%d", bool);
    CHECKVAL(u64, 0, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0), true, "%d", bool);
    CHECKVAL(u64, 0x80040008008efdULL, "%#llx");
}


static void tstASMAtomicReadU64(void)
{
    uint64_t u64 = 0;

    CHECKOP(ASMAtomicReadU64(&u64), 0ULL, "%#llx", uint64_t);
    CHECKVAL(u64, 0ULL, "%#llx");

    u64 = ~0ULL;
    CHECKOP(ASMAtomicReadU64(&u64), ~0ULL, "%#llx", uint64_t);
    CHECKVAL(u64, ~0ULL, "%#llx");

    u64 = 0xfedcba0987654321ULL;
    CHECKOP(ASMAtomicReadU64(&u64), 0xfedcba0987654321ULL, "%#llx", uint64_t);
    CHECKVAL(u64, 0xfedcba0987654321ULL, "%#llx");
}


static void tstASMAtomicDecIncS32(void)
{
    int32_t i32Rc;
    int32_t i32 = 10;
#define MYCHECK(op, rc) \
    do { \
        i32Rc = op; \
        if (i32Rc != (rc)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
            g_cErrors++; \
        } \
        if (i32 != (rc)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, rc); \
            g_cErrors++; \
        } \
    } while (0)
    MYCHECK(ASMAtomicDecS32(&i32), 9);
    MYCHECK(ASMAtomicDecS32(&i32), 8);
    MYCHECK(ASMAtomicDecS32(&i32), 7);
    MYCHECK(ASMAtomicDecS32(&i32), 6);
    MYCHECK(ASMAtomicDecS32(&i32), 5);
    MYCHECK(ASMAtomicDecS32(&i32), 4);
    MYCHECK(ASMAtomicDecS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicDecS32(&i32), 1);
    MYCHECK(ASMAtomicDecS32(&i32), 0);
    MYCHECK(ASMAtomicDecS32(&i32), -1);
    MYCHECK(ASMAtomicDecS32(&i32), -2);
    MYCHECK(ASMAtomicIncS32(&i32), -1);
    MYCHECK(ASMAtomicIncS32(&i32), 0);
    MYCHECK(ASMAtomicIncS32(&i32), 1);
    MYCHECK(ASMAtomicIncS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
#undef MYCHECK
}


static void tstASMAtomicAndOrU32(void)
{
    uint32_t u32 = 0xffffffff;

    ASMAtomicOrU32(&u32, 0xffffffff);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 0xffffffff);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 0x8f8f8f8f);
    CHECKVAL(u32, 0x8f8f8f8f, "%x");

    ASMAtomicOrU32(&u32, 0x70707070);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 1);
    CHECKVAL(u32, 1, "%x");

    ASMAtomicOrU32(&u32, 0x80000000);
    CHECKVAL(u32, 0x80000001, "%x");

    ASMAtomicAndU32(&u32, 0x80000000);
    CHECKVAL(u32, 0x80000000, "%x");

    ASMAtomicAndU32(&u32, 0);
    CHECKVAL(u32, 0, "%x");

    ASMAtomicOrU32(&u32, 0x42424242);
    CHECKVAL(u32, 0x42424242, "%x");
}


void tstASMMemZeroPage(void)
{
    struct
    {
        uint64_t u64Magic1;
        uint8_t  abPage[PAGE_SIZE];
        uint64_t u64Magic2;
    } Buf1, Buf2, Buf3;

    Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
    Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
    Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
    Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
    ASMMemZeroPage(Buf1.abPage);
    ASMMemZeroPage(Buf2.abPage);
    ASMMemZeroPage(Buf3.abPage);
    if (    Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
    {
        RTPrintf("tstInlineAsm: ASMMemZeroPage violated one or more of the magics!\n");
        g_cErrors++;
    }
    for (unsigned i = 0; i < sizeof(Buf1.abPage); i++)
        if (Buf1.abPage[i])
        {
            RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
            g_cErrors++;
        }
    for (unsigned i = 0; i < sizeof(Buf2.abPage); i++)
        if (Buf2.abPage[i])
        {
            RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
            g_cErrors++;
        }
    for (unsigned i = 0; i < sizeof(Buf3.abPage); i++)
        if (Buf3.abPage[i])
        {
            RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
            g_cErrors++;
        }
}


void tstASMMath(void)
{
    uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
    CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");

    uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
    CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");

    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
    CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
    CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
    CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
    CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");

#if 0 /* bird: question is whether this should trap or not:
       *
       * frank: Of course it must trap:
       *
       *     0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
       *
       * During the following division, the quotient must fit into a 32-bit register.
       * Therefore the smallest valid divisor is
       *
       *     (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
       *
       * which is definitely greater than 0x3b9aca00.
       *
       * bird: No, the C version does *not* crash. So, the question is whether there is any
       * code depending on it not crashing.
       *
       * Of course the assembly versions of the code crash right now for the reasons you've
       * given, but the 32-bit MSC version does not crash.
       *
       * frank: The C version does not crash but delivers incorrect results for this case.
       * The reason is
       *
       *     u.s.Hi = (unsigned long)(u64Hi / u32C);
       *
       * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
       * to 32 bit. If using this (optimized and fast) function we should just be sure that
       * the operands are in a valid range.
       */
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
    CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
#endif
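
    /* Illustrative sketch, not part of the original testcase: the assembly
     * variants divide the high dword of the product first, so the call only
     * avoids #DE when that partial quotient fits in 32 bits, i.e. roughly when
     * ((u64Hi * u32B) >> 32) < u32C.  For the disputed case above:
     *     0xfffffff8 * 0x77d7daf8        = 0x77d7daf441412840
     *     (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5  >  0x3b9aca00
     * so the divide overflows.  A caller could guard it along these lines
     * (u64A, u32B and u32C are hypothetical names):
     *
     *     if (ASMMult2xU32RetU64((uint32_t)(u64A >> 32), u32B) / u32C <= UINT32_MAX)
     *         u64 = ASMMultU64ByU32DivByU32(u64A, u32B, u32C);
     */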
}


int main(int argc, char *argv[])
{
    RTR3Init();
    RTPrintf("tstInlineAsm: TESTING\n");

    /*
     * Execute the tests.
     */
#if !defined(PIC) || !defined(RT_ARCH_X86)
    tstASMCpuId();
#endif
    tstASMAtomicXchgU8();
    tstASMAtomicXchgU16();
    tstASMAtomicXchgU32();
    tstASMAtomicXchgU64();
#ifdef RT_ARCH_AMD64
    tstASMAtomicXchgU128();
#endif
    tstASMAtomicXchgPtr();
    tstASMAtomicCmpXchgU32();
    tstASMAtomicCmpXchgU64();
    tstASMAtomicReadU64();
    tstASMAtomicDecIncS32();
    tstASMAtomicAndOrU32();
    tstASMMemZeroPage();
    tstASMMath();

    /*
     * Show the result.
     */
    if (!g_cErrors)
        RTPrintf("tstInlineAsm: SUCCESS\n");
    else
        RTPrintf("tstInlineAsm: FAILURE - %d errors\n", g_cErrors);
    return !!g_cErrors;
}