VirtualBox

source: vbox/trunk/src/VBox/Runtime/common/asm/asm-fake.cpp@76408

Last change on this file since 76408 was 71917, checked in by vboxsync, 7 years ago

Build fixes

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 11.6 KB
/* $Id: asm-fake.cpp 71917 2018-04-19 10:09:31Z vboxsync $ */
/** @file
 * IPRT - Fake asm.h routines for use early in a new port.
 */

/*
 * Copyright (C) 2010-2018 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
 * VirtualBox OSE distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 */


/*********************************************************************************************************************************
*   Header Files                                                                                                                 *
*********************************************************************************************************************************/
#include <iprt/asm.h>
#include "internal/iprt.h"

#include <iprt/string.h>
#include <iprt/param.h>

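/*
 * Note: everything below is a plain, single-threaded C stand-in for the real
 * asm.h primitives.  Nothing here is actually atomic and no memory barriers
 * are issued; these fakes exist only so a new port can compile and link
 * early on, before proper architecture-specific implementations are written.
 */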
RTDECL(uint8_t) ASMAtomicXchgU8(volatile uint8_t *pu8, uint8_t u8)
{
    uint8_t u8Ret = *pu8;
    *pu8 = u8;
    return u8Ret;
}

RTDECL(uint16_t) ASMAtomicXchgU16(volatile uint16_t *pu16, uint16_t u16)
{
    uint16_t u16Ret = *pu16;
    *pu16 = u16;
    return u16Ret;
}

RTDECL(uint32_t) ASMAtomicXchgU32(volatile uint32_t *pu32, uint32_t u32)
{
    uint32_t u32Ret = *pu32;
    *pu32 = u32;
    return u32Ret;
}

RTDECL(uint64_t) ASMAtomicXchgU64(volatile uint64_t *pu64, uint64_t u64)
{
    uint64_t u64Ret = *pu64;
    *pu64 = u64;
    return u64Ret;
}

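/*
 * Compare-and-exchange stand-ins: store the new value only when the current
 * value matches the expected one, and report via the return value whether
 * the store happened.  The Ex variants additionally hand back the value
 * that was actually seen.
 */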
RTDECL(bool) ASMAtomicCmpXchgU8(volatile uint8_t *pu8, const uint8_t u8New, const uint8_t u8Old)
{
    if (*pu8 == u8Old)
    {
        *pu8 = u8New;
        return true;
    }
    return false;
}

RTDECL(bool) ASMAtomicCmpXchgU32(volatile uint32_t *pu32, const uint32_t u32New, const uint32_t u32Old)
{
    if (*pu32 == u32Old)
    {
        *pu32 = u32New;
        return true;
    }
    return false;
}

RTDECL(bool) ASMAtomicCmpXchgU64(volatile uint64_t *pu64, const uint64_t u64New, const uint64_t u64Old)
{
    if (*pu64 == u64Old)
    {
        *pu64 = u64New;
        return true;
    }
    return false;
}

RTDECL(bool) ASMAtomicCmpXchgExU32(volatile uint32_t *pu32, const uint32_t u32New, const uint32_t u32Old, uint32_t *pu32Old)
{
    uint32_t u32Cur = *pu32;
    if (u32Cur == u32Old)
    {
        *pu32 = u32New;
        *pu32Old = u32Old;
        return true;
    }
    *pu32Old = u32Cur;
    return false;
}

RTDECL(bool) ASMAtomicCmpXchgExU64(volatile uint64_t *pu64, const uint64_t u64New, const uint64_t u64Old, uint64_t *pu64Old)
{
    uint64_t u64Cur = *pu64;
    if (u64Cur == u64Old)
    {
        *pu64 = u64New;
        *pu64Old = u64Old;
        return true;
    }
    *pu64Old = u64Cur;
    return false;
}

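/*
 * Arithmetic and logical stand-ins.  Note the asymmetry inherited from the
 * asm.h contracts: the Add routines return the old value (fetch-and-add),
 * while Inc/Dec return the resulting value.  The Uo ("unordered") variants
 * are identical to the ordered ones here, since these fakes never order
 * memory accesses anyway.
 */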
RTDECL(uint32_t) ASMAtomicAddU32(uint32_t volatile *pu32, uint32_t u32)
{
    uint32_t u32Old = *pu32;
    *pu32 = u32Old + u32;
    return u32Old;
}

RTDECL(uint64_t) ASMAtomicAddU64(uint64_t volatile *pu64, uint64_t u64)
{
    uint64_t u64Old = *pu64;
    *pu64 = u64Old + u64;
    return u64Old;
}

RTDECL(uint32_t) ASMAtomicIncU32(uint32_t volatile *pu32)
{
    return *pu32 += 1;
}

RTDECL(uint32_t) ASMAtomicUoIncU32(uint32_t volatile *pu32)
{
    return *pu32 += 1;
}

RTDECL(uint32_t) ASMAtomicDecU32(uint32_t volatile *pu32)
{
    return *pu32 -= 1;
}

RTDECL(uint32_t) ASMAtomicUoDecU32(uint32_t volatile *pu32)
{
    return *pu32 -= 1;
}

RTDECL(uint64_t) ASMAtomicIncU64(uint64_t volatile *pu64)
{
    return *pu64 += 1;
}

RTDECL(uint64_t) ASMAtomicDecU64(uint64_t volatile *pu64)
{
    return *pu64 -= 1;
}

RTDECL(void) ASMAtomicOrU32(uint32_t volatile *pu32, uint32_t u32)
{
    *pu32 |= u32;
}

RTDECL(void) ASMAtomicUoOrU32(uint32_t volatile *pu32, uint32_t u32)
{
    *pu32 |= u32;
}

RTDECL(void) ASMAtomicAndU32(uint32_t volatile *pu32, uint32_t u32)
{
    *pu32 &= u32;
}

RTDECL(void) ASMAtomicUoAndU32(uint32_t volatile *pu32, uint32_t u32)
{
    *pu32 &= u32;
}

RTDECL(void) ASMAtomicOrU64(uint64_t volatile *pu64, uint64_t u64)
{
    *pu64 |= u64;
}

RTDECL(void) ASMAtomicAndU64(uint64_t volatile *pu64, uint64_t u64)
{
    *pu64 &= u64;
}

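/*
 * Serialization is a no-op here: the fake routines assume a single thread
 * of execution, so there is no instruction stream or memory ordering to
 * enforce.
 */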
RTDECL(void) ASMSerializeInstruction(void)
{
}

RTDECL(uint64_t) ASMAtomicReadU64(volatile uint64_t *pu64)
{
    return *pu64;
}

RTDECL(uint64_t) ASMAtomicUoReadU64(volatile uint64_t *pu64)
{
    return *pu64;
}

RTDECL(void) ASMMemZeroPage(volatile void *pv)
{
    uintptr_t volatile *puPtr = (uintptr_t volatile *)pv;
    uint32_t            cLeft = PAGE_SIZE / sizeof(uintptr_t); /* element count, not bytes */
    while (cLeft-- > 0)
        *puPtr++ = 0;
}

RTDECL(void) ASMMemZero32(volatile void *pv, size_t cb)
{
    uint32_t volatile *pu32  = (uint32_t volatile *)pv;
    uint32_t           cLeft = cb / sizeof(uint32_t); /* element count, not bytes */
    while (cLeft-- > 0)
        *pu32++ = 0;
}

RTDECL(void) ASMMemFill32(volatile void *pv, size_t cb, uint32_t u32)
{
    /* cb is a byte count and is assumed to be a multiple of 4, as the asm.h
       contract requires; with an unaligned size the unsigned counter would
       wrap and the loop would overrun the block. */
    uint32_t volatile *pu32 = (uint32_t volatile *)pv;
    while (cb > 0)
    {
        *pu32 = u32;
        cb -= sizeof(uint32_t);
        pu32++;
    }
}

RTDECL(uint8_t) ASMProbeReadByte(const void *pvByte)
{
    return *(volatile uint8_t *)pvByte;
}

#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
RTDECL(void) ASMNopPause(void)
{
}
#endif

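/*
 * Bitmap operations.  The bitmap is treated as an array of bytes: bit iBit
 * lives in byte iBit / 8 at position iBit & 7.  The "Atomic" variants simply
 * forward to the plain ones, which is consistent with the rest of this file
 * but, again, provides no real atomicity.
 */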
RTDECL(void) ASMBitSet(volatile void *pvBitmap, int32_t iBit)
{
    uint8_t volatile *pau8Bitmap = (uint8_t volatile *)pvBitmap;
    pau8Bitmap[iBit / 8] |= (uint8_t)RT_BIT_32(iBit & 7);
}

RTDECL(void) ASMAtomicBitSet(volatile void *pvBitmap, int32_t iBit)
{
    ASMBitSet(pvBitmap, iBit);
}

RTDECL(void) ASMBitClear(volatile void *pvBitmap, int32_t iBit)
{
    uint8_t volatile *pau8Bitmap = (uint8_t volatile *)pvBitmap;
    pau8Bitmap[iBit / 8] &= ~((uint8_t)RT_BIT_32(iBit & 7));
}

RTDECL(void) ASMAtomicBitClear(volatile void *pvBitmap, int32_t iBit)
{
    ASMBitClear(pvBitmap, iBit);
}

RTDECL(void) ASMBitToggle(volatile void *pvBitmap, int32_t iBit)
{
    uint8_t volatile *pau8Bitmap = (uint8_t volatile *)pvBitmap;
    pau8Bitmap[iBit / 8] ^= (uint8_t)RT_BIT_32(iBit & 7);
}

RTDECL(void) ASMAtomicBitToggle(volatile void *pvBitmap, int32_t iBit)
{
    ASMBitToggle(pvBitmap, iBit);
}

RTDECL(bool) ASMBitTestAndSet(volatile void *pvBitmap, int32_t iBit)
{
    if (ASMBitTest(pvBitmap, iBit))
        return true;
    ASMBitSet(pvBitmap, iBit);
    return false;
}

RTDECL(bool) ASMAtomicBitTestAndSet(volatile void *pvBitmap, int32_t iBit)
{
    return ASMBitTestAndSet(pvBitmap, iBit);
}

RTDECL(bool) ASMBitTestAndClear(volatile void *pvBitmap, int32_t iBit)
{
    if (!ASMBitTest(pvBitmap, iBit))
        return false;
    ASMBitClear(pvBitmap, iBit);
    return true;
}

RTDECL(bool) ASMAtomicBitTestAndClear(volatile void *pvBitmap, int32_t iBit)
{
    return ASMBitTestAndClear(pvBitmap, iBit);
}

RTDECL(bool) ASMBitTestAndToggle(volatile void *pvBitmap, int32_t iBit)
{
    bool fRet = ASMBitTest(pvBitmap, iBit);
    ASMBitToggle(pvBitmap, iBit);
    return fRet;
}

RTDECL(bool) ASMAtomicBitTestAndToggle(volatile void *pvBitmap, int32_t iBit)
{
    return ASMBitTestAndToggle(pvBitmap, iBit);
}

RTDECL(bool) ASMBitTest(const volatile void *pvBitmap, int32_t iBit)
{
    uint8_t const volatile *pau8Bitmap = (uint8_t const volatile *)pvBitmap; /* keep const; read-only access */
    return pau8Bitmap[iBit / 8] & (uint8_t)RT_BIT_32(iBit & 7) ? true : false;
}

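/*
 * Bitmap searches: return the 0-based index of the first clear (or set) bit,
 * or -1 when none is found.  Per the asm.h docs, cBits is expected to be a
 * multiple of 32, so the byte-wise scan below never has to worry about a
 * trailing partial byte in practice.
 */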
RTDECL(int) ASMBitFirstClear(const volatile void *pvBitmap, uint32_t cBits)
{
    uint32_t                iBit = 0;
    uint8_t const volatile *pu8  = (uint8_t const volatile *)pvBitmap; /* keep const; read-only access */

    while (iBit < cBits)
    {
        uint8_t u8 = *pu8;
        if (u8 != UINT8_MAX)
        {
            while (u8 & 1)
            {
                u8 >>= 1;
                iBit++;
            }
            if (iBit >= cBits)
                return -1;
            return iBit;
        }

        iBit += 8;
        pu8++;
    }
    return -1;
}

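/*
 * ASMBitNextClear resumes right after iBitPrev: it first inspects the
 * partially consumed byte, then falls back to ASMBitFirstClear for the
 * byte-aligned remainder.  Returns -1 when no clear bit is left.
 */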
RTDECL(int) ASMBitNextClear(const volatile void *pvBitmap, uint32_t cBits, uint32_t iBitPrev)
{
    const volatile uint8_t *pau8Bitmap = (const volatile uint8_t *)pvBitmap;
    int iBit = ++iBitPrev & 7;
    if (iBit)
    {
        /*
         * Inspect the byte containing the unaligned bit.
         * Note: the inverted value must be truncated to 8 bits *before* the
         * shift; otherwise the ones that integer promotion of '~' sets above
         * bit 7 would be mistaken for clear bitmap bits.
         */
        uint8_t u8 = (uint8_t)~pau8Bitmap[iBitPrev / 8] >> iBit;
        if (u8)
        {
            iBit = 0;
            while (!(u8 & 1))
            {
                u8 >>= 1;
                iBit++;
            }
            return iBitPrev + iBit;
        }

        /*
         * Skip ahead and see if there is anything left to search.
         */
        iBitPrev |= 7;
        iBitPrev++;
        if (cBits <= iBitPrev)
            return -1;
    }

    /*
     * Byte search, let ASMBitFirstClear do the dirty work.
     */
    iBit = ASMBitFirstClear(&pau8Bitmap[iBitPrev / 8], cBits - iBitPrev);
    if (iBit >= 0)
        iBit += iBitPrev;
    return iBit;
}

RTDECL(int) ASMBitFirstSet(const volatile void *pvBitmap, uint32_t cBits)
{
    uint32_t                iBit = 0;
    uint8_t const volatile *pu8  = (uint8_t const volatile *)pvBitmap; /* keep const; read-only access */
    while (iBit < cBits)
    {
        uint8_t u8 = *pu8;
        if (u8 != 0)
        {
            while (!(u8 & 1))
            {
                u8 >>= 1;
                iBit++;
            }
            if (iBit >= cBits)
                return -1;
            return iBit;
        }

        iBit += 8;
        pu8++;
    }
    return -1;
}

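/*
 * ASMBitNextSet mirrors ASMBitNextClear above: inspect the unaligned byte,
 * then let ASMBitFirstSet handle the aligned remainder.  No inversion is
 * needed here, so the plain byte-to-int promotion is harmless.
 */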
RTDECL(int) ASMBitNextSet(const volatile void *pvBitmap, uint32_t cBits, uint32_t iBitPrev)
{
    const volatile uint8_t *pau8Bitmap = (const volatile uint8_t *)pvBitmap;
    int iBit = ++iBitPrev & 7;
    if (iBit)
    {
        /*
         * Inspect the byte containing the unaligned bit.
         */
        uint8_t u8 = pau8Bitmap[iBitPrev / 8] >> iBit;
        if (u8)
        {
            iBit = 0;
            while (!(u8 & 1))
            {
                u8 >>= 1;
                iBit++;
            }
            return iBitPrev + iBit;
        }

        /*
         * Skip ahead and see if there is anything left to search.
         */
        iBitPrev |= 7;
        iBitPrev++;
        if (cBits <= iBitPrev)
            return -1;
    }

    /*
     * Byte search, let ASMBitFirstSet do the dirty work.
     */
    iBit = ASMBitFirstSet(&pau8Bitmap[iBitPrev / 8], cBits - iBitPrev);
    if (iBit >= 0)
        iBit += iBitPrev;
    return iBit;
}

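/*
 * Scalar bit scans.  Unlike the bitmap searches above, these return a
 * 1-based bit index, with 0 meaning "no bit set", matching the asm.h
 * ASMBitFirstSetU32/U64 and ASMBitLastSetU32/U64 contracts.
 */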
RTDECL(unsigned) ASMBitFirstSetU32(uint32_t u32)
{
    uint32_t iBit;
    for (iBit = 0; iBit < 32; iBit++)
        if (u32 & RT_BIT_32(iBit))
            return iBit + 1;
    return 0;
}

RTDECL(unsigned) ASMBitLastSetU32(uint32_t u32)
{
    int32_t iBit = 32;
    while (iBit-- > 0)
        if (u32 & RT_BIT_32(iBit))
            return iBit + 1;
    return 0;
}

RTDECL(unsigned) ASMBitFirstSetU64(uint64_t u64)
{
    uint32_t iBit;
    for (iBit = 0; iBit < 64; iBit++)
        if (u64 & RT_BIT_64(iBit))
            return iBit + 1;
    return 0;
}

RTDECL(unsigned) ASMBitLastSetU64(uint64_t u64)
{
    int32_t iBit = 64;
    while (iBit-- > 0)
        if (u64 & RT_BIT_64(iBit))
            return iBit + 1;
    return 0;
}

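/*
 * Byte swappers, built from RT_MAKE_U16, RT_MAKE_U32_FROM_U8 and the
 * RT_BYTEn/RT_LOBYTE/RT_HIBYTE accessors, so they are endian-neutral C.
 */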
RTDECL(uint16_t) ASMByteSwapU16(uint16_t u16)
{
    return RT_MAKE_U16(RT_HIBYTE(u16), RT_LOBYTE(u16));
}

RTDECL(uint32_t) ASMByteSwapU32(uint32_t u32)
{
    return RT_MAKE_U32_FROM_U8(RT_BYTE4(u32), RT_BYTE3(u32), RT_BYTE2(u32), RT_BYTE1(u32));
}