VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/PDMAllCritSectRw.cpp@45276

Last change on this file since 45276 was 45189, checked in by vboxsync on 2013-03-26

STAM,VM: ring-3 only testing of pdmcritsectrw.h (disabled).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 41.6 KB
/* $Id: PDMAllCritSectRw.cpp 45189 2013-03-26 09:31:59Z vboxsync $ */
/** @file
 * PDM - Read/Write Critical Section, All Contexts.
 */

/*
 * Copyright (C) 2009-2013 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Header Files                                                               *
*******************************************************************************/
#define LOG_GROUP LOG_GROUP_PDM//_CRITSECT
#include "PDMInternal.h"
#include <VBox/vmm/pdmcritsectrw.h>
#include <VBox/vmm/mm.h>
#include <VBox/vmm/vmm.h>
#include <VBox/vmm/vm.h>
#include <VBox/err.h>
#include <VBox/vmm/hm.h>

#include <VBox/log.h>
#include <iprt/asm.h>
#include <iprt/asm-amd64-x86.h>
#include <iprt/assert.h>
#ifdef IN_RING3
# include <iprt/lockvalidator.h>
# include <iprt/semaphore.h>
#endif
#if defined(IN_RING3) || defined(IN_RING0)
# include <iprt/thread.h>
#endif


/*******************************************************************************
*   Defined Constants And Macros                                               *
*******************************************************************************/
/** The number of loops to spin for shared access in ring-3. */
#define PDMCRITSECTRW_SHRD_SPIN_COUNT_R3    20
/** The number of loops to spin for shared access in ring-0. */
#define PDMCRITSECTRW_SHRD_SPIN_COUNT_R0    128
/** The number of loops to spin for shared access in the raw-mode context. */
#define PDMCRITSECTRW_SHRD_SPIN_COUNT_RC    128

/** The number of loops to spin for exclusive access in ring-3. */
#define PDMCRITSECTRW_EXCL_SPIN_COUNT_R3    20
/** The number of loops to spin for exclusive access in ring-0. */
#define PDMCRITSECTRW_EXCL_SPIN_COUNT_R0    256
/** The number of loops to spin for exclusive access in the raw-mode context. */
#define PDMCRITSECTRW_EXCL_SPIN_COUNT_RC    256


/* Undefine the automatic VBOX_STRICT API mappings. */
#undef PDMCritSectRwEnterExcl
#undef PDMCritSectRwTryEnterExcl
#undef PDMCritSectRwEnterShared
#undef PDMCritSectRwTryEnterShared


/**
 * Gets the ring-3 native thread handle of the calling thread.
 *
 * @returns native thread handle (ring-3).
 * @param   pThis       The read/write critical section.  This is only used in
 *                      R0 and RC.
 */
DECL_FORCE_INLINE(RTNATIVETHREAD) pdmCritSectRwGetNativeSelf(PCPDMCRITSECTRW pThis)
{
#ifdef IN_RING3
    NOREF(pThis);
    RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
#else
    AssertMsgReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, ("%RX32\n", pThis->s.Core.u32Magic),
                    NIL_RTNATIVETHREAD);
    PVM pVM = pThis->s.CTX_SUFF(pVM);   AssertPtr(pVM);
    PVMCPU pVCpu = VMMGetCpu(pVM);      AssertPtr(pVCpu);
    RTNATIVETHREAD hNativeSelf = pVCpu->hNativeThread; Assert(hNativeSelf != NIL_RTNATIVETHREAD);
#endif
    return hNativeSelf;
}


#ifdef IN_RING3
/**
 * Changes the lock validator sub-class of the read/write critical section.
 *
 * It is recommended to make sure that nobody is using this critical section
 * while changing the value.
 *
 * @returns The old sub-class.  RTLOCKVAL_SUB_CLASS_INVALID is returned if the
 *          lock validator isn't compiled in or either of the parameters are
 *          invalid.
 * @param   pThis       Pointer to the read/write critical section.
 * @param   uSubClass   The new sub-class value.
 */
VMMDECL(uint32_t) PDMR3CritSectRwSetSubClass(PPDMCRITSECTRW pThis, uint32_t uSubClass)
{
    AssertPtrReturn(pThis, RTLOCKVAL_SUB_CLASS_INVALID);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, RTLOCKVAL_SUB_CLASS_INVALID);
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
    AssertReturn(!(pThis->s.Core.fFlags & RTCRITSECT_FLAGS_NOP), RTLOCKVAL_SUB_CLASS_INVALID);

    RTLockValidatorRecSharedSetSubClass(pThis->s.Core.pValidatorRead, uSubClass);
    return RTLockValidatorRecExclSetSubClass(pThis->s.Core.pValidatorWrite, uSubClass);
#else
    NOREF(uSubClass);
    return RTLOCKVAL_SUB_CLASS_INVALID;
#endif
}
#endif /* IN_RING3 */


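/*
 * Notes on the shared state word (pThis->s.Core.u64State), as manipulated by
 * the functions below: the 64-bit value packs the current direction
 * (RTCSRW_DIR_READ or RTCSRW_DIR_WRITE, extracted via RTCSRW_DIR_MASK and
 * RTCSRW_DIR_SHIFT), the count of read owners (RTCSRW_CNT_RD_MASK/SHIFT),
 * the count of waiting or owning writers (RTCSRW_CNT_WR_MASK/SHIFT), and the
 * number of readers blocked waiting for the direction to flip back
 * (RTCSRW_WAIT_CNT_RD_MASK/SHIFT).  Every state transition samples the word,
 * computes the new value, and retries an ASMAtomicCmpXchgU64 until it wins;
 * the exact bit layout lives with the RTCSRW_* constants in the IPRT headers.
 */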
static int pdmCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
{
    /*
     * Validate input.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);

#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
    RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
    if (!fTryOnly)
    {
        int rc9;
        RTNATIVETHREAD hNativeWriter;
        ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
        if (hNativeWriter != NIL_RTNATIVETHREAD && hNativeWriter == pdmCritSectRwGetNativeSelf(pThis))
            rc9 = RTLockValidatorRecExclCheckOrder(pThis->s.Core.pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        else
            rc9 = RTLockValidatorRecSharedCheckOrder(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        if (RT_FAILURE(rc9))
            return rc9;
    }
#endif

    /*
     * Get cracking...
     */
    uint64_t u64State    = ASMAtomicReadU64(&pThis->s.Core.u64State);
    uint64_t u64OldState = u64State;

    for (;;)
    {
        if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
        {
            /* It flows in the right direction, try follow it before it changes. */
            uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);
            u64State &= ~RTCSRW_CNT_RD_MASK;
            u64State |= c << RTCSRW_CNT_RD_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
            {
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
                RTLockValidatorRecSharedAddOwner(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }
        else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
        {
            /* Wrong direction, but we're alone here and can simply try switch the direction. */
            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
            u64State |= (UINT64_C(1) << RTCSRW_CNT_RD_SHIFT) | (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT);
            if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
            {
                Assert(!pThis->s.Core.fNeedReset);
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
                RTLockValidatorRecSharedAddOwner(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }
        else
        {
            /* Is the writer perhaps doing a read recursion? */
            RTNATIVETHREAD hNativeSelf = pdmCritSectRwGetNativeSelf(pThis);
            RTNATIVETHREAD hNativeWriter;
            ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
            if (hNativeSelf == hNativeWriter)
            {
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
                int rc9 = RTLockValidatorRecExclRecursionMixed(pThis->s.Core.pValidatorWrite, &pThis->s.Core.pValidatorRead->Core, pSrcPos);
                if (RT_FAILURE(rc9))
                    return rc9;
#endif
                Assert(pThis->s.Core.cWriterReads < UINT32_MAX / 2);
                ASMAtomicIncU32(&pThis->s.Core.cWriterReads);
                return VINF_SUCCESS; /* don't break! */
            }

            /* If we're only trying, return already. */
            if (fTryOnly)
                return VERR_SEM_BUSY;

            /* Add ourselves to the queue and wait for the direction to change. */
            uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);

            uint64_t cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
            cWait++;
            Assert(cWait <= c);
            Assert(cWait < RTCSRW_CNT_MASK / 2);

            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
            u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);

            if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
            {
                for (uint32_t iLoop = 0; ; iLoop++)
                {
                    int rc;
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
                    rc = RTLockValidatorRecSharedCheckBlocking(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos, true,
                                                               RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_READ, false);
                    if (RT_SUCCESS(rc))
#else
                    RTTHREAD hThreadSelf = RTThreadSelf();
                    RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_READ, false);
#endif
                    {
                        do
                            rc = SUPSemEventMultiWaitNoResume(pThis->s.CTX_SUFF(pVM)->pSession,
                                                              (SUPSEMEVENTMULTI)pThis->s.Core.hEvtRead,
                                                              RT_INDEFINITE_WAIT);
                        while (rc == VERR_INTERRUPTED && pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC);
                        RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_READ);
                        if (pThis->s.Core.u32Magic != RTCRITSECTRW_MAGIC)
                            return VERR_SEM_DESTROYED;
                    }
                    if (RT_FAILURE(rc))
                    {
                        /* Decrement the counts and return the error. */
                        for (;;)
                        {
                            u64OldState = u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
                            c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT; Assert(c > 0);
                            c--;
                            cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT; Assert(cWait > 0);
                            cWait--;
                            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
                            u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
                            if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                                break;
                        }
                        return rc;
                    }

                    Assert(pThis->s.Core.fNeedReset);
                    u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
                    if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
                        break;
                    AssertMsg(iLoop < 1, ("%u\n", iLoop));
                }

                /* Decrement the wait count and maybe reset the semaphore (if we're last). */
                for (;;)
                {
                    u64OldState = u64State;

                    cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
                    Assert(cWait > 0);
                    cWait--;
                    u64State &= ~RTCSRW_WAIT_CNT_RD_MASK;
                    u64State |= cWait << RTCSRW_WAIT_CNT_RD_SHIFT;

                    if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                    {
                        if (cWait == 0)
                        {
                            if (ASMAtomicXchgBool(&pThis->s.Core.fNeedReset, false))
                            {
                                int rc = SUPSemEventMultiReset(pThis->s.CTX_SUFF(pVM)->pSession,
                                                               (SUPSEMEVENTMULTI)pThis->s.Core.hEvtRead);
                                AssertRCReturn(rc, rc);
                            }
                        }
                        break;
                    }
                    u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
                }

#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
                RTLockValidatorRecSharedAddOwner(pThis->s.Core.pValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }

        if (pThis->s.Core.u32Magic != RTCRITSECTRW_MAGIC)
            return VERR_SEM_DESTROYED;

        ASMNopPause();
        u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
        u64OldState = u64State;
    }

    /* got it! */
    STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(Stat,EnterShared));
    Assert((ASMAtomicReadU64(&pThis->s.Core.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT));
    return VINF_SUCCESS;
}


/**
 * Enter a critical section with shared (read) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  @a rcBusy if in ring-0 or raw-mode context and it is busy.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @param   rcBusy      The status code to return when we're in RC or R0 and the
 *                      section is busy.  Pass VINF_SUCCESS to acquire the
 *                      critical section through a ring-3 call if necessary.
 * @sa      PDMCritSectRwEnterSharedDebug, PDMCritSectRwTryEnterShared,
 *          PDMCritSectRwTryEnterSharedDebug, PDMCritSectRwLeaveShared,
 *          RTCritSectRwEnterShared.
 */
VMMDECL(int) PDMCritSectRwEnterShared(PPDMCRITSECTRW pThis, int rcBusy)
{
#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
    return pdmCritSectRwEnterShared(pThis, rcBusy, NULL, false /*fTryOnly*/);
#else
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
    return pdmCritSectRwEnterShared(pThis, rcBusy, &SrcPos, false /*fTryOnly*/);
#endif
}
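

/*
 * Illustrative usage sketch (not part of the original file): the basic
 * shared-access pattern against the API above.  The caller and the protected
 * variable (g_u32Shadow) are hypothetical; the calls and the rcBusy
 * behaviour are as documented above.  Guarded out so the file still
 * compiles unchanged.
 */
#if 0
static uint32_t volatile g_u32Shadow;

static int exampleReadUnderSharedLock(PPDMCRITSECTRW pCritSect, uint32_t *puValue)
{
    /* In R0/RC a busy section makes this return VERR_SEM_BUSY instead of blocking. */
    int rc = PDMCritSectRwEnterShared(pCritSect, VERR_SEM_BUSY);
    if (RT_SUCCESS(rc))
    {
        *puValue = ASMAtomicReadU32(&g_u32Shadow); /* any number of readers may run here */
        PDMCritSectRwLeaveShared(pCritSect);       /* every enter needs a matching leave */
    }
    return rc;
}
#endif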


/**
 * Enter a critical section with shared (read) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  @a rcBusy if in ring-0 or raw-mode context and it is busy.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @param   rcBusy      The status code to return when we're in RC or R0 and the
 *                      section is busy.  Pass VINF_SUCCESS to acquire the
 *                      critical section through a ring-3 call if necessary.
 * @param   uId         Where we're entering the section.
 * @param   pszFile     The source position - file.
 * @param   iLine       The source position - line.
 * @param   pszFunction The source position - function.
 * @sa      PDMCritSectRwEnterShared, PDMCritSectRwTryEnterShared,
 *          PDMCritSectRwTryEnterSharedDebug, PDMCritSectRwLeaveShared,
 *          RTCritSectRwEnterSharedDebug.
 */
VMMDECL(int) PDMCritSectRwEnterSharedDebug(PPDMCRITSECTRW pThis, int rcBusy, RTHCUINTPTR uId, RT_SRC_POS_DECL)
{
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
    return pdmCritSectRwEnterShared(pThis, rcBusy, &SrcPos, false /*fTryOnly*/);
}


/**
 * Try enter a critical section with shared (read) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  VERR_SEM_BUSY if the critsect was owned.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwTryEnterSharedDebug, PDMCritSectRwEnterShared,
 *          PDMCritSectRwEnterSharedDebug, PDMCritSectRwLeaveShared,
 *          RTCritSectRwTryEnterShared.
 */
VMMDECL(int) PDMCritSectRwTryEnterShared(PPDMCRITSECTRW pThis)
{
#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
    return pdmCritSectRwEnterShared(pThis, VERR_SEM_BUSY, NULL, true /*fTryOnly*/);
#else
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
    return pdmCritSectRwEnterShared(pThis, VERR_SEM_BUSY, &SrcPos, true /*fTryOnly*/);
#endif
}


/**
 * Try enter a critical section with shared (read) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  VERR_SEM_BUSY if the critsect was owned.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @param   uId         Where we're entering the section.
 * @param   pszFile     The source position - file.
 * @param   iLine       The source position - line.
 * @param   pszFunction The source position - function.
 * @sa      PDMCritSectRwTryEnterShared, PDMCritSectRwEnterShared,
 *          PDMCritSectRwEnterSharedDebug, PDMCritSectRwLeaveShared,
 *          RTCritSectRwTryEnterSharedDebug.
 */
VMMDECL(int) PDMCritSectRwTryEnterSharedDebug(PPDMCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
{
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
    return pdmCritSectRwEnterShared(pThis, VERR_SEM_BUSY, &SrcPos, true /*fTryOnly*/);
}
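

/*
 * Illustrative sketch (hypothetical caller, not part of the original file):
 * using the try-variant to probe for shared access without ever blocking,
 * e.g. from a path that must not sleep.  Guarded out from the build.
 */
#if 0
static bool exampleTryReadFastPath(PPDMCRITSECTRW pCritSect)
{
    if (RT_SUCCESS(PDMCritSectRwTryEnterShared(pCritSect)))
    {
        /* ... fast-path read-only work goes here ... */
        PDMCritSectRwLeaveShared(pCritSect);
        return true;
    }
    return false; /* VERR_SEM_BUSY: held exclusively by someone else, take the slow path */
}
#endif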


/**
 * Leave a critical section held with shared access.
 *
 * @returns VBox status code.
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwEnterShared, PDMCritSectRwTryEnterShared,
 *          PDMCritSectRwEnterSharedDebug, PDMCritSectRwTryEnterSharedDebug,
 *          PDMCritSectRwLeaveExcl, RTCritSectRwLeaveShared.
 */
VMMDECL(int) PDMCritSectRwLeaveShared(PPDMCRITSECTRW pThis)
{
    /*
     * Validate handle.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);

    /*
     * Check the direction and take action accordingly.
     */
    uint64_t u64State    = ASMAtomicReadU64(&pThis->s.Core.u64State);
    uint64_t u64OldState = u64State;
    if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
    {
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
        int rc9 = RTLockValidatorRecSharedCheckAndRelease(pThis->s.Core.pValidatorRead, NIL_RTTHREAD);
        if (RT_FAILURE(rc9))
            return rc9;
#endif
        for (;;)
        {
            uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
            AssertReturn(c > 0, VERR_NOT_OWNER);
            c--;

            if (   c > 0
                || (u64State & RTCSRW_CNT_WR_MASK) == 0)
            {
                /* Don't change the direction. */
                u64State &= ~RTCSRW_CNT_RD_MASK;
                u64State |= c << RTCSRW_CNT_RD_SHIFT;
                if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                    break;
            }
            else
            {
                /* Reverse the direction and signal the writer threads. */
                u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_DIR_MASK);
                u64State |= RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT;
                if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                {
                    int rc = SUPSemEventSignal(pThis->s.CTX_SUFF(pVM)->pSession, (SUPSEMEVENT)pThis->s.Core.hEvtWrite);
                    AssertRC(rc);
                    break;
                }
            }

            ASMNopPause();
            u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
            u64OldState = u64State;
        }
    }
    else
    {
        RTNATIVETHREAD hNativeSelf = pdmCritSectRwGetNativeSelf(pThis);
        RTNATIVETHREAD hNativeWriter;
        ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
        AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
        AssertReturn(pThis->s.Core.cWriterReads > 0, VERR_NOT_OWNER);
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
        int rc = RTLockValidatorRecExclUnwindMixed(pThis->s.Core.pValidatorWrite, &pThis->s.Core.pValidatorRead->Core);
        if (RT_FAILURE(rc))
            return rc;
#endif
        ASMAtomicDecU32(&pThis->s.Core.cWriterReads);
    }

    return VINF_SUCCESS;
}
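

/*
 * Illustrative sketch (hypothetical caller, not part of the original file):
 * the write owner may recurse with shared access, which the cWriterReads
 * accounting above implements; the leaves must exactly mirror the enters,
 * and all read recursions must be released before the final exclusive
 * leave.  Guarded out from the build.
 */
#if 0
static void exampleWriterReadRecursion(PPDMCRITSECTRW pCritSect)
{
    PDMCritSectRwEnterExcl(pCritSect, VINF_SUCCESS);   /* become the write owner */
    PDMCritSectRwEnterShared(pCritSect, VINF_SUCCESS); /* counted in cWriterReads, never blocks */
    /* ... read-only helper running inside the write section ... */
    PDMCritSectRwLeaveShared(pCritSect);               /* must precede the final excl leave */
    PDMCritSectRwLeaveExcl(pCritSect);
}
#endif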


static int pdmCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
{
    /*
     * Validate input.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);

#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
    RTTHREAD hThreadSelf = NIL_RTTHREAD;
    if (!fTryOnly)
    {
        hThreadSelf = RTThreadSelfAutoAdopt();
        int rc9 = RTLockValidatorRecExclCheckOrder(pThis->s.Core.pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        if (RT_FAILURE(rc9))
            return rc9;
    }
#endif

    /*
     * Check if we're already the owner and just recursing.
     */
    RTNATIVETHREAD hNativeSelf = pdmCritSectRwGetNativeSelf(pThis);
    RTNATIVETHREAD hNativeWriter;
    ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
    if (hNativeSelf == hNativeWriter)
    {
        Assert((ASMAtomicReadU64(&pThis->s.Core.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
        int rc9 = RTLockValidatorRecExclRecursion(pThis->s.Core.pValidatorWrite, pSrcPos);
        if (RT_FAILURE(rc9))
            return rc9;
#endif
        Assert(pThis->s.Core.cWriteRecursions < UINT32_MAX / 2);
        STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(Stat,EnterExcl));
        ASMAtomicIncU32(&pThis->s.Core.cWriteRecursions);
        return VINF_SUCCESS;
    }

    /*
     * Get cracking.
     */
    uint64_t u64State    = ASMAtomicReadU64(&pThis->s.Core.u64State);
    uint64_t u64OldState = u64State;

    for (;;)
    {
        if (   (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
            || (u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) != 0)
        {
            /* It flows in the right direction, try follow it before it changes. */
            uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);
            u64State &= ~RTCSRW_CNT_WR_MASK;
            u64State |= c << RTCSRW_CNT_WR_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                break;
        }
        else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
        {
            /* Wrong direction, but we're alone here and can simply try switch the direction. */
            u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
            u64State |= (UINT64_C(1) << RTCSRW_CNT_WR_SHIFT) | (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT);
            if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                break;
        }
        else if (fTryOnly)
            /* Wrong direction and we're not supposed to wait, just return. */
            return VERR_SEM_BUSY;
        else
        {
            /* Add ourselves to the write count and break out to do the wait. */
            uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);
            u64State &= ~RTCSRW_CNT_WR_MASK;
            u64State |= c << RTCSRW_CNT_WR_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                break;
        }

        if (pThis->s.Core.u32Magic != RTCRITSECTRW_MAGIC)
            return VERR_SEM_DESTROYED;

        ASMNopPause();
        u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
        u64OldState = u64State;
    }

    /*
     * If we're in write mode now try grab the ownership.  Play fair if there
     * are threads already waiting.
     */
    bool fDone = (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
              && (   ((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT) == 1
                  || fTryOnly);
    if (fDone)
        ASMAtomicCmpXchgHandle(&pThis->s.Core.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
    if (!fDone)
    {
        /*
         * Wait for our turn.
         */
        for (uint32_t iLoop = 0; ; iLoop++)
        {
            int rc;
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
            if (!fTryOnly)
            {
                if (hThreadSelf == NIL_RTTHREAD)
                    hThreadSelf = RTThreadSelfAutoAdopt();
                rc = RTLockValidatorRecExclCheckBlocking(pThis->s.Core.pValidatorWrite, hThreadSelf, pSrcPos, true,
                                                         RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_WRITE, false);
            }
            else
                rc = VINF_SUCCESS;
            if (RT_SUCCESS(rc))
#else
            RTTHREAD hThreadSelf = RTThreadSelf();
            RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_WRITE, false);
#endif
            {
                do
                    rc = SUPSemEventWaitNoResume(pThis->s.CTX_SUFF(pVM)->pSession,
                                                 (SUPSEMEVENT)pThis->s.Core.hEvtWrite,
                                                 RT_INDEFINITE_WAIT);
                while (rc == VERR_INTERRUPTED && pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC);
                RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_WRITE);
                if (pThis->s.Core.u32Magic != RTCRITSECTRW_MAGIC)
                    return VERR_SEM_DESTROYED;
            }
            if (RT_FAILURE(rc))
            {
                /* Decrement the counts and return the error. */
                for (;;)
                {
                    u64OldState = u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
                    uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
                    c--;
                    u64State &= ~RTCSRW_CNT_WR_MASK;
                    u64State |= c << RTCSRW_CNT_WR_SHIFT;
                    if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                        break;
                }
                return rc;
            }

            u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
            if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
            {
                ASMAtomicCmpXchgHandle(&pThis->s.Core.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
                if (fDone)
                    break;
            }
            AssertMsg(iLoop < 1000, ("%u\n", iLoop)); /* may loop a few times here... */
        }
    }

    /*
     * Got it!
     */
    Assert((ASMAtomicReadU64(&pThis->s.Core.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
    ASMAtomicWriteU32(&pThis->s.Core.cWriteRecursions, 1);
    Assert(pThis->s.Core.cWriterReads == 0);
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
    RTLockValidatorRecExclSetOwner(pThis->s.Core.pValidatorWrite, hThreadSelf, pSrcPos, true);
#endif
    STAM_REL_COUNTER_INC(&pThis->s.CTX_MID_Z(Stat,EnterExcl));
    STAM_PROFILE_ADV_START(&pThis->s.StatWriteLocked, swl);

    return VINF_SUCCESS;
}


/**
 * Enter a critical section with exclusive (write) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  @a rcBusy if in ring-0 or raw-mode context and it is busy.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @param   rcBusy      The status code to return when we're in RC or R0 and the
 *                      section is busy.  Pass VINF_SUCCESS to acquire the
 *                      critical section through a ring-3 call if necessary.
 * @sa      PDMCritSectRwEnterExclDebug, PDMCritSectRwTryEnterExcl,
 *          PDMCritSectRwTryEnterExclDebug,
 *          PDMCritSectEnterDebug, PDMCritSectEnter,
 *          RTCritSectRwEnterExcl.
 */
VMMDECL(int) PDMCritSectRwEnterExcl(PPDMCRITSECTRW pThis, int rcBusy)
{
#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
    return pdmCritSectRwEnterExcl(pThis, rcBusy, NULL, false /*fTryOnly*/);
#else
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
    return pdmCritSectRwEnterExcl(pThis, rcBusy, &SrcPos, false /*fTryOnly*/);
#endif
}
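

/*
 * Illustrative sketch (hypothetical device code, not part of the original
 * file): exclusive access with the two common rcBusy strategies documented
 * above.  Guarded out from the build.
 */
#if 0
static int exampleWriteUnderExclLock(PPDMCRITSECTRW pCritSect)
{
    /*
     * Passing VINF_SUCCESS asks the R0/RC code to fall back on a ring-3
     * call rather than fail; passing e.g. VERR_SEM_BUSY instead makes
     * contention visible to the caller.
     */
    int rc = PDMCritSectRwEnterExcl(pCritSect, VINF_SUCCESS);
    if (RT_SUCCESS(rc))
    {
        /* ... single-writer update of the protected state ... */
        PDMCritSectRwLeaveExcl(pCritSect);
    }
    return rc;
}
#endif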


/**
 * Enter a critical section with exclusive (write) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  @a rcBusy if in ring-0 or raw-mode context and it is busy.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @param   rcBusy      The status code to return when we're in RC or R0 and the
 *                      section is busy.  Pass VINF_SUCCESS to acquire the
 *                      critical section through a ring-3 call if necessary.
 * @param   uId         Where we're entering the section.
 * @param   pszFile     The source position - file.
 * @param   iLine       The source position - line.
 * @param   pszFunction The source position - function.
 * @sa      PDMCritSectRwEnterExcl, PDMCritSectRwTryEnterExcl,
 *          PDMCritSectRwTryEnterExclDebug,
 *          PDMCritSectEnterDebug, PDMCritSectEnter,
 *          RTCritSectRwEnterExclDebug.
 */
VMMDECL(int) PDMCritSectRwEnterExclDebug(PPDMCRITSECTRW pThis, int rcBusy, RTHCUINTPTR uId, RT_SRC_POS_DECL)
{
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
    return pdmCritSectRwEnterExcl(pThis, rcBusy, &SrcPos, false /*fTryOnly*/);
}


/**
 * Try enter a critical section with exclusive (write) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  VERR_SEM_BUSY if the critsect was owned.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwEnterExcl, PDMCritSectRwTryEnterExclDebug,
 *          PDMCritSectRwEnterExclDebug,
 *          PDMCritSectTryEnter, PDMCritSectTryEnterDebug,
 *          RTCritSectRwTryEnterExcl.
 */
VMMDECL(int) PDMCritSectRwTryEnterExcl(PPDMCRITSECTRW pThis)
{
#if !defined(PDMCRITSECTRW_STRICT) || !defined(IN_RING3)
    return pdmCritSectRwEnterExcl(pThis, VERR_SEM_BUSY, NULL, true /*fTryOnly*/);
#else
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
    return pdmCritSectRwEnterExcl(pThis, VERR_SEM_BUSY, &SrcPos, true /*fTryOnly*/);
#endif
}


/**
 * Try enter a critical section with exclusive (write) access.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS on success.
 * @retval  VERR_SEM_BUSY if the critsect was owned.
 * @retval  VERR_SEM_NESTED if nested enter on a no-nesting section. (Asserted.)
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 *
 * @param   pThis       Pointer to the read/write critical section.
 * @param   uId         Where we're entering the section.
 * @param   pszFile     The source position - file.
 * @param   iLine       The source position - line.
 * @param   pszFunction The source position - function.
 * @sa      PDMCritSectRwTryEnterExcl, PDMCritSectRwEnterExcl,
 *          PDMCritSectRwEnterExclDebug,
 *          PDMCritSectTryEnterDebug, PDMCritSectTryEnter,
 *          RTCritSectRwTryEnterExclDebug.
 */
VMMDECL(int) PDMCritSectRwTryEnterExclDebug(PPDMCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
{
    RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
    return pdmCritSectRwEnterExcl(pThis, VERR_SEM_BUSY, &SrcPos, true /*fTryOnly*/);
}


/**
 * Leave a critical section held exclusively.
 *
 * @returns VBox status code.
 * @retval  VERR_SEM_DESTROYED if the critical section is deleted before or
 *          during the operation.
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwLeaveShared, RTCritSectRwLeaveExcl.
 */
VMMDECL(int) PDMCritSectRwLeaveExcl(PPDMCRITSECTRW pThis)
{
    /*
     * Validate handle.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);

    RTNATIVETHREAD hNativeSelf = pdmCritSectRwGetNativeSelf(pThis);
    RTNATIVETHREAD hNativeWriter;
    ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
    AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);

    /*
     * Unwind one recursion. Is it the final one?
     */
    if (pThis->s.Core.cWriteRecursions == 1)
    {
        AssertReturn(pThis->s.Core.cWriterReads == 0, VERR_WRONG_ORDER); /* (must release all read recursions before the final write.) */
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
        int rc9 = RTLockValidatorRecExclReleaseOwner(pThis->s.Core.pValidatorWrite, true);
        if (RT_FAILURE(rc9))
            return rc9;
#endif
        /*
         * Update the state.
         */
        ASMAtomicWriteU32(&pThis->s.Core.cWriteRecursions, 0);
        ASMAtomicWriteHandle(&pThis->s.Core.hNativeWriter, NIL_RTNATIVETHREAD);
        STAM_PROFILE_ADV_STOP(&pThis->s.StatWriteLocked, swl);

        for (;;)
        {
            uint64_t u64State    = ASMAtomicReadU64(&pThis->s.Core.u64State);
            uint64_t u64OldState = u64State;

            uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
            Assert(c > 0);
            c--;

            if (   c > 0
                || (u64State & RTCSRW_CNT_RD_MASK) == 0)
            {
                /* Don't change the direction, wake up the next writer if any. */
                u64State &= ~RTCSRW_CNT_WR_MASK;
                u64State |= c << RTCSRW_CNT_WR_SHIFT;
                if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                {
                    if (c > 0)
                    {
                        int rc = SUPSemEventSignal(pThis->s.CTX_SUFF(pVM)->pSession, (SUPSEMEVENT)pThis->s.Core.hEvtWrite);
                        AssertRC(rc);
                    }
                    break;
                }
            }
            else
            {
                /* Reverse the direction and signal the reader threads. */
                u64State &= ~(RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
                u64State |= RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT;
                if (ASMAtomicCmpXchgU64(&pThis->s.Core.u64State, u64State, u64OldState))
                {
                    Assert(!pThis->s.Core.fNeedReset);
                    ASMAtomicWriteBool(&pThis->s.Core.fNeedReset, true);
                    int rc = SUPSemEventMultiSignal(pThis->s.CTX_SUFF(pVM)->pSession, (SUPSEMEVENTMULTI)pThis->s.Core.hEvtRead);
                    AssertRC(rc);
                    break;
                }
            }

            ASMNopPause();
            if (pThis->s.Core.u32Magic != RTCRITSECTRW_MAGIC)
                return VERR_SEM_DESTROYED;
        }
    }
    else
    {
        /*
         * Not the final recursion.
         */
        Assert(pThis->s.Core.cWriteRecursions != 0);
#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
        int rc9 = RTLockValidatorRecExclUnwind(pThis->s.Core.pValidatorWrite);
        if (RT_FAILURE(rc9))
            return rc9;
#endif
        ASMAtomicDecU32(&pThis->s.Core.cWriteRecursions);
    }

    return VINF_SUCCESS;
}
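

/*
 * Illustrative sketch (hypothetical caller, not part of the original file):
 * exclusive enter recursion as counted by cWriteRecursions; only the final
 * leave clears the owner and flips the state word.  Guarded out from the
 * build.
 */
#if 0
static void exampleWriteRecursion(PPDMCRITSECTRW pCritSect)
{
    PDMCritSectRwEnterExcl(pCritSect, VINF_SUCCESS); /* cWriteRecursions = 1 */
    PDMCritSectRwEnterExcl(pCritSect, VINF_SUCCESS); /* recursion, now 2 */
    PDMCritSectRwLeaveExcl(pCritSect);               /* back to 1, lock still held */
    PDMCritSectRwLeaveExcl(pCritSect);               /* final leave, lock released */
}
#endif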


/**
 * Checks if the caller is the exclusive (write) owner of the critical section.
 *
 * @retval  @c true if owner.
 * @retval  @c false if not owner.
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwIsReadOwner, PDMCritSectIsOwner,
 *          RTCritSectRwIsWriteOwner.
 */
VMMDECL(bool) PDMCritSectRwIsWriteOwner(PPDMCRITSECTRW pThis)
{
    /*
     * Validate handle.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, false);

    /*
     * Check ownership.
     */
    RTNATIVETHREAD hNativeWriter;
    ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hNativeWriter);
    if (hNativeWriter == NIL_RTNATIVETHREAD)
        return false;
    return hNativeWriter == pdmCritSectRwGetNativeSelf(pThis);
}


/**
 * Checks if the caller is one of the read owners of the critical section.
 *
 * @note    !CAUTION!  This API doesn't work reliably if lock validation isn't
 *          enabled.  Meaning, the answer is not trustworthy unless
 *          RT_LOCK_STRICT or PDMCRITSECTRW_STRICT was defined at build time.
 *          Also, make sure you do not use RTCRITSECTRW_FLAGS_NO_LOCK_VAL when
 *          creating the semaphore.  And finally, if you used a locking class,
 *          don't disable deadlock detection by setting cMsMinDeadlock to
 *          RT_INDEFINITE_WAIT.
 *
 *          In short, only use this for assertions.
 *
 * @returns @c true if reader, @c false if not.
 * @param   pThis       Pointer to the read/write critical section.
 * @param   fWannaHear  What you'd like to hear when lock validation is not
 *                      available.  (For avoiding asserting all over the place.)
 * @sa      PDMCritSectRwIsWriteOwner, RTCritSectRwIsReadOwner.
 */
VMMDECL(bool) PDMCritSectRwIsReadOwner(PPDMCRITSECTRW pThis, bool fWannaHear)
{
    /*
     * Validate handle.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, false);

    /*
     * Inspect the state.
     */
    uint64_t u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
    if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
    {
        /*
         * It's in write mode, so we can only be a reader if we're also the
         * current writer.
         */
        RTNATIVETHREAD hWriter;
        ASMAtomicUoReadHandle(&pThis->s.Core.hNativeWriter, &hWriter);
        if (hWriter == NIL_RTNATIVETHREAD)
            return false;
        return hWriter == pdmCritSectRwGetNativeSelf(pThis);
    }

    /*
     * Read mode.  If there are no current readers, then we cannot be a reader.
     */
    if (!(u64State & RTCSRW_CNT_RD_MASK))
        return false;

#if defined(PDMCRITSECTRW_STRICT) && defined(IN_RING3)
    /*
     * Ask the lock validator.
     * Note! It doesn't know everything, let's deal with that if it becomes an issue...
     */
    return RTLockValidatorRecSharedIsOwner(pThis->s.Core.pValidatorRead, NIL_RTTHREAD);
#else
    /*
     * Ok, we don't know, just tell the caller what they want to hear.
     */
    return fWannaHear;
#endif
}
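

/*
 * Illustrative sketch (hypothetical caller, not part of the original file):
 * per the caution above, the ownership checks are meant for assertions.
 * Guarded out from the build.
 */
#if 0
static void exampleAssertOwnership(PPDMCRITSECTRW pCritSect)
{
    /* Callable only with the write lock held: */
    Assert(PDMCritSectRwIsWriteOwner(pCritSect));

    /* Callable with at least shared access; 'true' is what we want to hear
       when lock validation isn't compiled in: */
    Assert(PDMCritSectRwIsReadOwner(pCritSect, true));
}
#endif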


/**
 * Gets the write recursion count.
 *
 * @returns The write recursion count (0 if bad critsect).
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwGetWriterReadRecursion, PDMCritSectRwGetReadCount,
 *          RTCritSectRwGetWriteRecursion.
 */
VMMDECL(uint32_t) PDMCritSectRwGetWriteRecursion(PPDMCRITSECTRW pThis)
{
    /*
     * Validate handle.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, 0);

    /*
     * Return the requested data.
     */
    return pThis->s.Core.cWriteRecursions;
}


/**
 * Gets the read recursion count of the current writer.
 *
 * @returns The read recursion count (0 if bad critsect).
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwGetWriteRecursion, PDMCritSectRwGetReadCount,
 *          RTCritSectRwGetWriterReadRecursion.
 */
VMMDECL(uint32_t) PDMCritSectRwGetWriterReadRecursion(PPDMCRITSECTRW pThis)
{
    /*
     * Validate handle.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, 0);

    /*
     * Return the requested data.
     */
    return pThis->s.Core.cWriterReads;
}


/**
 * Gets the current number of reads.
 *
 * This includes all read recursions, so it might be higher than the number of
 * read owners.  It does not include reads done by the current writer.
 *
 * @returns The read count (0 if bad critsect).
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectRwGetWriteRecursion, PDMCritSectRwGetWriterReadRecursion,
 *          RTCritSectRwGetReadCount.
 */
VMMDECL(uint32_t) PDMCritSectRwGetReadCount(PPDMCRITSECTRW pThis)
{
    /*
     * Validate input.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC, 0);

    /*
     * Return the requested data.
     */
    uint64_t u64State = ASMAtomicReadU64(&pThis->s.Core.u64State);
    if ((u64State & RTCSRW_DIR_MASK) != (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
        return 0;
    return (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
}


/**
 * Checks if the read/write critical section is initialized or not.
 *
 * @retval  @c true if initialized.
 * @retval  @c false if not initialized.
 * @param   pThis       Pointer to the read/write critical section.
 * @sa      PDMCritSectIsInitialized, RTCritSectRwIsInitialized.
 */
VMMDECL(bool) PDMCritSectRwIsInitialized(PCPDMCRITSECTRW pThis)
{
    AssertPtr(pThis);
    return pThis->s.Core.u32Magic == RTCRITSECTRW_MAGIC;
}