VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstInlineAsm.cpp @ 4968

Last change on this file since 4968 was 4071, checked in by vboxsync, 17 years ago

Biggest check-in ever. New source code headers for all (C) innotek files.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 30.7 KB
/* $Id: tstInlineAsm.cpp 4071 2007-08-07 17:07:59Z vboxsync $ */
/** @file
 * innotek Portable Runtime Testcase - inline assembly.
 */

/*
 * Copyright (C) 2006-2007 innotek GmbH
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License as published by the Free Software Foundation,
 * in version 2 as it comes in the "COPYING" file of the VirtualBox OSE
 * distribution. VirtualBox OSE is distributed in the hope that it will
 * be useful, but WITHOUT ANY WARRANTY of any kind.
 */

/*******************************************************************************
*   Header Files                                                               *
*******************************************************************************/
#include <iprt/asm.h>
#include <iprt/stream.h>
#include <iprt/string.h>
#include <iprt/runtime.h>
#include <iprt/param.h>


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** Global error count. */
static unsigned g_cErrors;


/*******************************************************************************
*   Defined Constants And Macros                                               *
*******************************************************************************/
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            g_cErrors++; \
            RTPrintf("%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)

#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            g_cErrors++; \
            RTPrintf("%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)
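
/* CHECKVAL() verifies a value already stored in memory, while CHECKOP() evaluates
 * an expression once, casts it to 'type' and verifies the result; both bump
 * g_cErrors and print the failing location. A typical pairing, as used in the
 * tests below:
 *     CHECKOP(ASMAtomicXchgU8(&s.u8, 1), 0, "%#x", uint8_t);
 *     CHECKVAL(s.u8, 1, "%#x");
 */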


#if !defined(PIC) || !defined(RT_ARCH_X86)
const char *getCacheAss(unsigned u)
{
    if (u == 0)
        return "res0 ";
    if (u == 1)
        return "direct";
    if (u >= 256)
        return "???";

    char *pszRet;
    RTStrAPrintf(&pszRet, "%d way", u);     /* intentional leak! */
    return pszRet;
}


const char *getL2CacheAss(unsigned u)
{
    switch (u)
    {
        case 0: return "off ";
        case 1: return "direct";
        case 2: return "2 way ";
        case 3: return "res3 ";
        case 4: return "4 way ";
        case 5: return "res5 ";
        case 6: return "8 way ";
        case 7: return "res7 ";
        case 8: return "16 way";
        case 9: return "res9 ";
        case 10: return "res10 ";
        case 11: return "res11 ";
        case 12: return "res12 ";
        case 13: return "res13 ";
        case 14: return "res14 ";
        case 15: return "fully ";
        default:
            return "????";
    }
}


/**
 * Test and dump all possible info from the CPUID instruction.
 *
 * @remark  Bits shared with the libc cpuid.c program. This was all written by me, so no worries.
 * @todo    Transform the dumping into a generic runtime function. We'll need it for logging!
 */
void tstASMCpuId(void)
{
    unsigned iBit;
    struct
    {
        uint32_t uEBX, uEAX, uEDX, uECX;
    } s;
    if (!ASMHasCpuId())
    {
        RTPrintf("tstInlineAsm: warning! CPU doesn't support CPUID\n");
        return;
    }

    /*
     * Try the 0 function and use that for checking the ASMCpuId_* variants.
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);

    uint32_t u32 = ASMCpuId_ECX(0);
    CHECKVAL(u32, s.uECX, "%x");

    u32 = ASMCpuId_EDX(0);
    CHECKVAL(u32, s.uEDX, "%x");

    uint32_t uECX2 = s.uECX - 1;
    uint32_t uEDX2 = s.uEDX - 1;
    ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);

    CHECKVAL(uECX2, s.uECX, "%x");
    CHECKVAL(uEDX2, s.uEDX, "%x");

    /*
     * Done testing, dump the information.
     */
    RTPrintf("tstInlineAsm: CPUID Dump\n");
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    const uint32_t cFunctions = s.uEAX;

    /* raw dump */
    RTPrintf("\n"
             " RAW Standard CPUIDs\n"
             "Function eax ebx ecx edx\n");
    for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
    {
        ASMCpuId(iStd, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("%08x %08x %08x %08x %08x%s\n",
                 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTPrintf("Name: %.04s%.04s%.04s\n"
             "Support: 0-%u\n",
             &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);

    /*
     * Get Features.
     */
    if (cFunctions >= 1)
    {
        ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
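        /* CPUID leaf 1 layout, for reference: EAX carries the stepping in bits 0-3,
           the model in 4-7, the family in 8-11, the extended model in 16-19 and the
           extended family in 20-27; EBX carries the brand ID in bits 0-7, the CLFLUSH
           line size (in 8-byte units) in 8-15, the logical CPU count in 16-23 and the
           initial APIC ID in 24-31. The RTPrintf below decodes these fields. */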
        RTPrintf("Family: %d \tExtended: %d \tEffective: %d\n"
                 "Model: %d \tExtended: %d \tEffective: %d\n"
                 "Stepping: %d\n"
                 "APIC ID: %#04x\n"
                 "Logical CPUs: %d\n"
                 "CLFLUSH Size: %d\n"
                 "Brand ID: %#04x\n",
                 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ((s.uEAX >> 8) & 0xf) + (((s.uEAX >> 8) & 0xf) == 0xf ? (s.uEAX >> 20) & 0x7f : 0),
                 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ((s.uEAX >> 4) & 0xf) | (((s.uEAX >> 4) & 0xf) == 0xf ? (s.uEAX >> 16) & 0x0f : 0),
                 (s.uEAX >> 0) & 0xf,
                 (s.uEBX >> 24) & 0xff,
                 (s.uEBX >> 16) & 0xff,
                 (s.uEBX >> 8) & 0xff,
                 (s.uEBX >> 0) & 0xff);

        RTPrintf("Features EDX: ");
        if (s.uEDX & BIT(0)) RTPrintf(" FPU");
        if (s.uEDX & BIT(1)) RTPrintf(" VME");
        if (s.uEDX & BIT(2)) RTPrintf(" DE");
        if (s.uEDX & BIT(3)) RTPrintf(" PSE");
        if (s.uEDX & BIT(4)) RTPrintf(" TSC");
        if (s.uEDX & BIT(5)) RTPrintf(" MSR");
        if (s.uEDX & BIT(6)) RTPrintf(" PAE");
        if (s.uEDX & BIT(7)) RTPrintf(" MCE");
        if (s.uEDX & BIT(8)) RTPrintf(" CX8");
        if (s.uEDX & BIT(9)) RTPrintf(" APIC");
        if (s.uEDX & BIT(10)) RTPrintf(" 10");
        if (s.uEDX & BIT(11)) RTPrintf(" SEP");
        if (s.uEDX & BIT(12)) RTPrintf(" MTRR");
        if (s.uEDX & BIT(13)) RTPrintf(" PGE");
        if (s.uEDX & BIT(14)) RTPrintf(" MCA");
        if (s.uEDX & BIT(15)) RTPrintf(" CMOV");
        if (s.uEDX & BIT(16)) RTPrintf(" PAT");
        if (s.uEDX & BIT(17)) RTPrintf(" PSE36");
        if (s.uEDX & BIT(18)) RTPrintf(" PSN");
        if (s.uEDX & BIT(19)) RTPrintf(" CLFSH");
        if (s.uEDX & BIT(20)) RTPrintf(" 20");
        if (s.uEDX & BIT(21)) RTPrintf(" DS");
        if (s.uEDX & BIT(22)) RTPrintf(" ACPI");
        if (s.uEDX & BIT(23)) RTPrintf(" MMX");
        if (s.uEDX & BIT(24)) RTPrintf(" FXSR");
        if (s.uEDX & BIT(25)) RTPrintf(" SSE");
        if (s.uEDX & BIT(26)) RTPrintf(" SSE2");
        if (s.uEDX & BIT(27)) RTPrintf(" SS");
        if (s.uEDX & BIT(28)) RTPrintf(" HTT");
        if (s.uEDX & BIT(29)) RTPrintf(" 29");
        if (s.uEDX & BIT(30)) RTPrintf(" 30");
        if (s.uEDX & BIT(31)) RTPrintf(" 31");
        RTPrintf("\n");

        /** @todo check intel docs. */
        RTPrintf("Features ECX: ");
        if (s.uECX & BIT(0)) RTPrintf(" SSE3");
        for (iBit = 1; iBit < 13; iBit++)
            if (s.uECX & BIT(iBit))
                RTPrintf(" %d", iBit);
        if (s.uECX & BIT(13)) RTPrintf(" CX16");
        for (iBit = 14; iBit < 32; iBit++)
            if (s.uECX & BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    /*
     * Extended.
     * Implemented after AMD specs.
     */
    /** @todo check out the intel specs. */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
    {
        RTPrintf("No extended CPUID info? Check the manual on how to detect this...\n");
        return;
    }
    const uint32_t cExtFunctions = s.uEAX | 0x80000000;
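    /* Leaf 0x80000000 reports the highest supported extended leaf in EAX; the OR
       with 0x80000000 above presumably just guards against a CPU returning a value
       without the high bit set, so the loop below always stays in the extended range. */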

    /* raw dump */
    RTPrintf("\n"
             " RAW Extended CPUIDs\n"
             "Function eax ebx ecx edx\n");
    for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
    {
        ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("%08x %08x %08x %08x %08x%s\n",
                 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTPrintf("Ext Name: %.4s%.4s%.4s\n"
             "Ext Supports: 0x80000000-%#010x\n",
             &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);

    if (cExtFunctions >= 0x80000001)
    {
        ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Family: %d \tExtended: %d \tEffective: %d\n"
                 "Model: %d \tExtended: %d \tEffective: %d\n"
                 "Stepping: %d\n"
                 "Brand ID: %#05x\n",
                 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ((s.uEAX >> 8) & 0xf) + (((s.uEAX >> 8) & 0xf) == 0xf ? (s.uEAX >> 20) & 0x7f : 0),
                 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ((s.uEAX >> 4) & 0xf) | (((s.uEAX >> 4) & 0xf) == 0xf ? (s.uEAX >> 16) & 0x0f : 0),
                 (s.uEAX >> 0) & 0xf,
                 s.uEBX & 0xfff);

        RTPrintf("Features EDX: ");
        if (s.uEDX & BIT(0)) RTPrintf(" FPU");
        if (s.uEDX & BIT(1)) RTPrintf(" VME");
        if (s.uEDX & BIT(2)) RTPrintf(" DE");
        if (s.uEDX & BIT(3)) RTPrintf(" PSE");
        if (s.uEDX & BIT(4)) RTPrintf(" TSC");
        if (s.uEDX & BIT(5)) RTPrintf(" MSR");
        if (s.uEDX & BIT(6)) RTPrintf(" PAE");
        if (s.uEDX & BIT(7)) RTPrintf(" MCE");
        if (s.uEDX & BIT(8)) RTPrintf(" CMPXCHG8B");
        if (s.uEDX & BIT(9)) RTPrintf(" APIC");
        if (s.uEDX & BIT(10)) RTPrintf(" 10");
        if (s.uEDX & BIT(11)) RTPrintf(" SysCallSysRet");
        if (s.uEDX & BIT(12)) RTPrintf(" MTRR");
        if (s.uEDX & BIT(13)) RTPrintf(" PGE");
        if (s.uEDX & BIT(14)) RTPrintf(" MCA");
        if (s.uEDX & BIT(15)) RTPrintf(" CMOV");
        if (s.uEDX & BIT(16)) RTPrintf(" PAT");
        if (s.uEDX & BIT(17)) RTPrintf(" PSE36");
        if (s.uEDX & BIT(18)) RTPrintf(" 18");
        if (s.uEDX & BIT(19)) RTPrintf(" 19");
        if (s.uEDX & BIT(20)) RTPrintf(" NX");
        if (s.uEDX & BIT(21)) RTPrintf(" 21");
        if (s.uEDX & BIT(22)) RTPrintf(" MmxExt");
        if (s.uEDX & BIT(23)) RTPrintf(" MMX");
        if (s.uEDX & BIT(24)) RTPrintf(" FXSR");
        if (s.uEDX & BIT(25)) RTPrintf(" FastFXSR");
        if (s.uEDX & BIT(26)) RTPrintf(" 26");
        if (s.uEDX & BIT(27)) RTPrintf(" RDTSCP");
        if (s.uEDX & BIT(28)) RTPrintf(" 28");
        if (s.uEDX & BIT(29)) RTPrintf(" LongMode");
        if (s.uEDX & BIT(30)) RTPrintf(" 3DNowExt");
        if (s.uEDX & BIT(31)) RTPrintf(" 3DNow");
        RTPrintf("\n");

        RTPrintf("Features ECX: ");
        if (s.uECX & BIT(0)) RTPrintf(" LahfSahf");
        if (s.uECX & BIT(1)) RTPrintf(" CmpLegacy");
        if (s.uECX & BIT(2)) RTPrintf(" SVM");
        if (s.uECX & BIT(3)) RTPrintf(" 3");
        if (s.uECX & BIT(4)) RTPrintf(" AltMovCr8");
        for (iBit = 5; iBit < 32; iBit++)
            if (s.uECX & BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

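    /* Leaves 0x80000002..0x80000004 each return 16 bytes of the processor brand
       string in EAX, EBX, ECX and EDX, 48 bytes in total; szString below is sized
       accordingly (4 registers * 4 bytes * 3 leaves + 1 for the terminator). */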
    char szString[4*4*3+1] = {0};
    if (cExtFunctions >= 0x80000002)
        ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
    if (cExtFunctions >= 0x80000003)
        ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
    if (cExtFunctions >= 0x80000004)
        ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
    if (cExtFunctions >= 0x80000002)
        RTPrintf("Full Name: %s\n", szString);

    if (cExtFunctions >= 0x80000005)
    {
        ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("TLB 2/4M Instr/Uni: %s %3d entries\n"
                 "TLB 2/4M Data: %s %3d entries\n",
                 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
                 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
        RTPrintf("TLB 4K Instr/Uni: %s %3d entries\n"
                 "TLB 4K Data: %s %3d entries\n",
                 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
                 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
        RTPrintf("L1 Instr Cache Line Size: %d bytes\n"
                 "L1 Instr Cache Lines Per Tag: %d\n"
                 "L1 Instr Cache Associativity: %s\n"
                 "L1 Instr Cache Size: %d KB\n",
                 (s.uEDX >> 0) & 0xff,
                 (s.uEDX >> 8) & 0xff,
                 getCacheAss((s.uEDX >> 16) & 0xff),
                 (s.uEDX >> 24) & 0xff);
        RTPrintf("L1 Data Cache Line Size: %d bytes\n"
                 "L1 Data Cache Lines Per Tag: %d\n"
                 "L1 Data Cache Associativity: %s\n"
                 "L1 Data Cache Size: %d KB\n",
                 (s.uECX >> 0) & 0xff,
                 (s.uECX >> 8) & 0xff,
                 getCacheAss((s.uECX >> 16) & 0xff),
                 (s.uECX >> 24) & 0xff);
    }

    if (cExtFunctions >= 0x80000006)
    {
        ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
                 "L2 TLB 2/4M Data: %s %4d entries\n",
                 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
                 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
        RTPrintf("L2 TLB 4K Instr/Uni: %s %4d entries\n"
                 "L2 TLB 4K Data: %s %4d entries\n",
                 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
                 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
        RTPrintf("L2 Cache Line Size: %d bytes\n"
                 "L2 Cache Lines Per Tag: %d\n"
                 "L2 Cache Associativity: %s\n"
                 "L2 Cache Size: %d KB\n",
                 (s.uEDX >> 0) & 0xff,
                 (s.uEDX >> 8) & 0xf,
                 getL2CacheAss((s.uEDX >> 12) & 0xf),
                 (s.uEDX >> 16) & 0xffff);
    }

    if (cExtFunctions >= 0x80000007)
    {
        ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("APM Features: ");
        if (s.uEDX & BIT(0)) RTPrintf(" TS");
        if (s.uEDX & BIT(1)) RTPrintf(" FID");
        if (s.uEDX & BIT(2)) RTPrintf(" VID");
        if (s.uEDX & BIT(3)) RTPrintf(" TTP");
        if (s.uEDX & BIT(4)) RTPrintf(" TM");
        if (s.uEDX & BIT(5)) RTPrintf(" STC");
        if (s.uEDX & BIT(6)) RTPrintf(" 6");
        if (s.uEDX & BIT(7)) RTPrintf(" 7");
        if (s.uEDX & BIT(8)) RTPrintf(" TscInvariant");
        for (iBit = 9; iBit < 32; iBit++)
            if (s.uEDX & BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    if (cExtFunctions >= 0x80000008)
    {
        ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Physical Address Width: %d bits\n"
                 "Virtual Address Width: %d bits\n",
                 (s.uEAX >> 0) & 0xff,
                 (s.uEAX >> 8) & 0xff);
        RTPrintf("Physical Core Count: %d\n",
                 ((s.uECX >> 0) & 0xff) + 1);
        if ((s.uECX >> 12) & 0xf)
            RTPrintf("ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
    }

    if (cExtFunctions >= 0x8000000a)
    {
        ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("SVM Revision: %d (%#x)\n"
                 "Number of Address Space IDs: %d (%#x)\n",
                 s.uEAX & 0xff, s.uEAX & 0xff,
                 s.uEBX, s.uEBX);
    }
}
#endif /* !PIC || !X86 */


static void tstASMAtomicXchgU8(void)
{
    struct
    {
        uint8_t u8Dummy0;
        uint8_t u8;
        uint8_t u8Dummy1;
    } s;
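    /* The dummy bytes on either side of s.u8 are guard values; they are verified
       at the end of the test to catch an exchange that touches neighbouring bytes. */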

    s.u8 = 0;
    s.u8Dummy0 = s.u8Dummy1 = 0x42;
    CHECKOP(ASMAtomicXchgU8(&s.u8, 1), 0, "%#x", uint8_t);
    CHECKVAL(s.u8, 1, "%#x");

    CHECKOP(ASMAtomicXchgU8(&s.u8, 0), 1, "%#x", uint8_t);
    CHECKVAL(s.u8, 0, "%#x");

    CHECKOP(ASMAtomicXchgU8(&s.u8, 0xff), 0, "%#x", uint8_t);
    CHECKVAL(s.u8, 0xff, "%#x");

    CHECKOP(ASMAtomicXchgU8(&s.u8, 0x87), 0xff, "%#x", uint8_t);
    CHECKVAL(s.u8, 0x87, "%#x");
    CHECKVAL(s.u8Dummy0, 0x42, "%#x");
    CHECKVAL(s.u8Dummy1, 0x42, "%#x");
}


static void tstASMAtomicXchgU16(void)
{
    struct
    {
        uint16_t u16Dummy0;
        uint16_t u16;
        uint16_t u16Dummy1;
    } s;

    s.u16 = 0;
    s.u16Dummy0 = s.u16Dummy1 = 0x1234;
    CHECKOP(ASMAtomicXchgU16(&s.u16, 1), 0, "%#x", uint16_t);
    CHECKVAL(s.u16, 1, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0), 1, "%#x", uint16_t);
    CHECKVAL(s.u16, 0, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0xffff), 0, "%#x", uint16_t);
    CHECKVAL(s.u16, 0xffff, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0x8765), 0xffff, "%#x", uint16_t);
    CHECKVAL(s.u16, 0x8765, "%#x");
    CHECKVAL(s.u16Dummy0, 0x1234, "%#x");
    CHECKVAL(s.u16Dummy1, 0x1234, "%#x");
}


static void tstASMAtomicXchgU32(void)
{
    struct
    {
        uint32_t u32Dummy0;
        uint32_t u32;
        uint32_t u32Dummy1;
    } s;

    s.u32 = 0;
    s.u32Dummy0 = s.u32Dummy1 = 0x11223344;

    CHECKOP(ASMAtomicXchgU32(&s.u32, 1), 0, "%#x", uint32_t);
    CHECKVAL(s.u32, 1, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, 0), 1, "%#x", uint32_t);
    CHECKVAL(s.u32, 0, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, ~0U), 0, "%#x", uint32_t);
    CHECKVAL(s.u32, ~0U, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, 0x87654321), ~0U, "%#x", uint32_t);
    CHECKVAL(s.u32, 0x87654321, "%#x");

    CHECKVAL(s.u32Dummy0, 0x11223344, "%#x");
    CHECKVAL(s.u32Dummy1, 0x11223344, "%#x");
}


static void tstASMAtomicXchgU64(void)
{
    struct
    {
        uint64_t u64Dummy0;
        uint64_t u64;
        uint64_t u64Dummy1;
    } s;

    s.u64 = 0;
    s.u64Dummy0 = s.u64Dummy1 = 0x1122334455667788ULL;

    CHECKOP(ASMAtomicXchgU64(&s.u64, 1), 0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 1ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, 0), 1ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 0ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, ~0ULL), 0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, ~0ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, 0xfedcba0987654321ULL), ~0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 0xfedcba0987654321ULL, "%#llx");

    CHECKVAL(s.u64Dummy0, 0x1122334455667788ULL, "%#llx");
    CHECKVAL(s.u64Dummy1, 0x1122334455667788ULL, "%#llx");
}


#ifdef RT_ARCH_AMD64
static void tstASMAtomicXchgU128(void)
{
    struct
    {
        RTUINT128U u128Dummy0;
        RTUINT128U u128;
        RTUINT128U u128Dummy1;
    } s;
    RTUINT128U u128Ret;
    RTUINT128U u128Arg;
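    /* RTUINT128U exposes the 128-bit value both as a whole (.u) and as two 64-bit
       halves (.s.Lo / .s.Hi); the checks below compare the halves individually since
       the CHECKVAL format strings only go up to 64 bits. */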


    s.u128Dummy0.s.Lo = s.u128Dummy0.s.Hi = 0x1122334455667788;
    s.u128.s.Lo = 0;
    s.u128.s.Hi = 0;
    s.u128Dummy1 = s.u128Dummy0;

    u128Arg.s.Lo = 1;
    u128Arg.s.Hi = 0;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, 0ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, 1ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, 0ULL, "%#llx");

    u128Arg.s.Lo = 0;
    u128Arg.s.Hi = 0;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, 1ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, 0ULL, "%#llx");

    u128Arg.s.Lo = ~0ULL;
    u128Arg.s.Hi = ~0ULL;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, 0ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, 0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, ~0ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, ~0ULL, "%#llx");


    u128Arg.s.Lo = 0xfedcba0987654321ULL;
    u128Arg.s.Hi = 0x8897a6b5c4d3e2f1ULL;
    u128Ret.u = ASMAtomicXchgU128(&s.u128.u, u128Arg.u);
    CHECKVAL(u128Ret.s.Lo, ~0ULL, "%#llx");
    CHECKVAL(u128Ret.s.Hi, ~0ULL, "%#llx");
    CHECKVAL(s.u128.s.Lo, 0xfedcba0987654321ULL, "%#llx");
    CHECKVAL(s.u128.s.Hi, 0x8897a6b5c4d3e2f1ULL, "%#llx");

    CHECKVAL(s.u128Dummy0.s.Lo, 0x1122334455667788, "%#llx");
    CHECKVAL(s.u128Dummy0.s.Hi, 0x1122334455667788, "%#llx");
    CHECKVAL(s.u128Dummy1.s.Lo, 0x1122334455667788, "%#llx");
    CHECKVAL(s.u128Dummy1.s.Hi, 0x1122334455667788, "%#llx");
}
#endif


static void tstASMAtomicXchgPtr(void)
{
    void *pv = NULL;

    CHECKOP(ASMAtomicXchgPtr(&pv, (void *)(~(uintptr_t)0)), NULL, "%p", void *);
    CHECKVAL(pv, (void *)(~(uintptr_t)0), "%p");

    CHECKOP(ASMAtomicXchgPtr(&pv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *);
    CHECKVAL(pv, (void *)0x87654321, "%p");

    CHECKOP(ASMAtomicXchgPtr(&pv, NULL), (void *)0x87654321, "%p", void *);
    CHECKVAL(pv, NULL, "%p");
}


static void tstASMAtomicCmpXchgU32(void)
{
    uint32_t u32 = 0xffffffff;

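    /* ASMAtomicCmpXchgU32(pu32, uNew, uOld) stores uNew only when *pu32 still equals
       uOld and returns whether the exchange took place; the pairs below exercise both
       the miss and the hit case. */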
    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0), false, "%d", bool);
    CHECKVAL(u32, 0xffffffff, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0xffffffff), true, "%d", bool);
    CHECKVAL(u32, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0xffffffff), false, "%d", bool);
    CHECKVAL(u32, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0), true, "%d", bool);
    CHECKVAL(u32, 0x8008efd, "%x");
}


static void tstASMAtomicCmpXchgU64(void)
{
    uint64_t u64 = 0xffffffffffffffULL;

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0), false, "%d", bool);
    CHECKVAL(u64, 0xffffffffffffffULL, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0xffffffffffffffULL), true, "%d", bool);
    CHECKVAL(u64, 0, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff), false, "%d", bool);
    CHECKVAL(u64, 0, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL), false, "%d", bool);
    CHECKVAL(u64, 0, "%#llx");

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0), true, "%d", bool);
    CHECKVAL(u64, 0x80040008008efdULL, "%#llx");
}


static void tstASMAtomicReadU64(void)
{
    uint64_t u64 = 0;

    CHECKOP(ASMAtomicReadU64(&u64), 0ULL, "%#llx", uint64_t);
    CHECKVAL(u64, 0ULL, "%#llx");

    u64 = ~0ULL;
    CHECKOP(ASMAtomicReadU64(&u64), ~0ULL, "%#llx", uint64_t);
    CHECKVAL(u64, ~0ULL, "%#llx");

    u64 = 0xfedcba0987654321ULL;
    CHECKOP(ASMAtomicReadU64(&u64), 0xfedcba0987654321ULL, "%#llx", uint64_t);
    CHECKVAL(u64, 0xfedcba0987654321ULL, "%#llx");
}


static void tstASMAtomicDecIncS32(void)
{
    int32_t i32Rc;
    int32_t i32 = 10;
#define MYCHECK(op, rc) \
    do { \
        i32Rc = op; \
        if (i32Rc != (rc)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
            g_cErrors++; \
        } \
        if (i32 != (rc)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, rc); \
            g_cErrors++; \
        } \
    } while (0)
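    /* MYCHECK verifies both the value returned by the atomic operation and the value
       left behind in i32, so each line below asserts the complete state after one
       increment or decrement. */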
    MYCHECK(ASMAtomicDecS32(&i32), 9);
    MYCHECK(ASMAtomicDecS32(&i32), 8);
    MYCHECK(ASMAtomicDecS32(&i32), 7);
    MYCHECK(ASMAtomicDecS32(&i32), 6);
    MYCHECK(ASMAtomicDecS32(&i32), 5);
    MYCHECK(ASMAtomicDecS32(&i32), 4);
    MYCHECK(ASMAtomicDecS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicDecS32(&i32), 1);
    MYCHECK(ASMAtomicDecS32(&i32), 0);
    MYCHECK(ASMAtomicDecS32(&i32), -1);
    MYCHECK(ASMAtomicDecS32(&i32), -2);
    MYCHECK(ASMAtomicIncS32(&i32), -1);
    MYCHECK(ASMAtomicIncS32(&i32), 0);
    MYCHECK(ASMAtomicIncS32(&i32), 1);
    MYCHECK(ASMAtomicIncS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
#undef MYCHECK
}


static void tstASMAtomicAndOrU32(void)
{
    uint32_t u32 = 0xffffffff;

    ASMAtomicOrU32(&u32, 0xffffffff);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 0xffffffff);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 0x8f8f8f8f);
    CHECKVAL(u32, 0x8f8f8f8f, "%x");

    ASMAtomicOrU32(&u32, 0x70707070);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 1);
    CHECKVAL(u32, 1, "%x");

    ASMAtomicOrU32(&u32, 0x80000000);
    CHECKVAL(u32, 0x80000001, "%x");

    ASMAtomicAndU32(&u32, 0x80000000);
    CHECKVAL(u32, 0x80000000, "%x");

    ASMAtomicAndU32(&u32, 0);
    CHECKVAL(u32, 0, "%x");

    ASMAtomicOrU32(&u32, 0x42424242);
    CHECKVAL(u32, 0x42424242, "%x");
}


void tstASMMemZeroPage(void)
{
    struct
    {
        uint64_t u64Magic1;
        uint8_t abPage[PAGE_SIZE];
        uint64_t u64Magic2;
    } Buf1, Buf2, Buf3;

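    /* Each page buffer is bracketed by magic values so that an ASMMemZeroPage
       overrun (writing outside the PAGE_SIZE byte page) would show up in the
       magic checks below. */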
    Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
    Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
    Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
    Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
    ASMMemZeroPage(Buf1.abPage);
    ASMMemZeroPage(Buf2.abPage);
    ASMMemZeroPage(Buf3.abPage);
    if (    Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
    {
        RTPrintf("tstInlineAsm: ASMMemZeroPage violated one/both magic(s)!\n");
        g_cErrors++;
    }
    for (unsigned i = 0; i < sizeof(Buf1.abPage); i++)
        if (Buf1.abPage[i])
        {
            RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
            g_cErrors++;
        }
    for (unsigned i = 0; i < sizeof(Buf2.abPage); i++)
        if (Buf2.abPage[i])
        {
            RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
            g_cErrors++;
        }
    for (unsigned i = 0; i < sizeof(Buf3.abPage); i++)
        if (Buf3.abPage[i])
        {
            RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
            g_cErrors++;
        }
}


void tstASMMath(void)
{
    uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
    CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");

    uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
    CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");

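    /* Sanity of the expected values above: 0x80000000 * 0x10000000 is 2^31 * 2^28
       = 2^59 = 0x0800000000000000, and dividing that by 0x10000000 (2^28) yields
       2^31 again. ASMMultU64ByU32DivByU32 below, going by its name and the checks
       that follow, computes u64 * u32Num / u32Den with full intermediate precision. */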
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
    CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
    CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
    CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
    CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");

#if 0 /* bird: question is whether this should trap or not:
       *
       * frank: Of course it must trap:
       *
       *        0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
       *
       *        During the following division, the quotient must fit into a 32-bit register.
       *        Therefore the smallest valid divisor is
       *
       *        (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
       *
       *        which is definitely greater than 0x3b9aca00.
       *
       * bird: No, the C version does *not* crash. So, the question is whether there is any
       *       code depending on it not crashing.
       *
       *       Of course the assembly versions of the code crash right now for the reasons you've
       *       given, but the 32-bit MSC version does not crash.
       *
       * frank: The C version does not crash but delivers incorrect results for this case.
       *        The reason is
       *
       *            u.s.Hi = (unsigned long)(u64Hi / u32C);
       *
       *        Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
       *        to 32 bit. If using this (optimized and fast) function we should just be sure that
       *        the operands are in a valid range.
       */
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
    CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
#endif
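    /* Summarising the disabled discussion above: the assembly variants rely on the
       32-bit DIV instruction, so the quotient of the high part has to fit into
       32 bits or the CPU raises a divide error, while the C variant truncates that
       quotient (the u.s.Hi assignment quoted above) and silently returns a wrong
       result instead. */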
}


int main(int argc, char *argv[])
{
    RTR3Init();
    RTPrintf("tstInlineAsm: TESTING\n");

    /*
     * Execute the tests.
     */
#if !defined(PIC) || !defined(RT_ARCH_X86)
    tstASMCpuId();
#endif
    tstASMAtomicXchgU8();
    tstASMAtomicXchgU16();
    tstASMAtomicXchgU32();
    tstASMAtomicXchgU64();
#ifdef RT_ARCH_AMD64
    tstASMAtomicXchgU128();
#endif
    tstASMAtomicXchgPtr();
    tstASMAtomicCmpXchgU32();
    tstASMAtomicCmpXchgU64();
    tstASMAtomicReadU64();
    tstASMAtomicDecIncS32();
    tstASMAtomicAndOrU32();
    tstASMMemZeroPage();
    tstASMMath();

    /*
     * Show the result.
     */
    if (!g_cErrors)
        RTPrintf("tstInlineAsm: SUCCESS\n");
    else
        RTPrintf("tstInlineAsm: FAILURE - %d errors\n", g_cErrors);
    return !!g_cErrors;
}