VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-x86.asm@ 28656

Last change on this file since 28656 was 23487, checked in by vboxsync, 15 years ago

VMM: Save and restore the [R|E]FLAGS register in the vmmR0CallRing3SetJmp/LongJmp code.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 11.8 KB
Line 
1; $Id: VMMR0JmpA-x86.asm 23487 2009-10-01 14:57:14Z vboxsync $
2;; @file
3; VMM - R0 SetJmp / LongJmp routines for X86.
4;
5
6;
7; Copyright (C) 2006-2009 Sun Microsystems, Inc.
8;
9; This file is part of VirtualBox Open Source Edition (OSE), as
10; available from http://www.virtualbox.org. This file is free software;
11; you can redistribute it and/or modify it under the terms of the GNU
12; General Public License (GPL) as published by the Free Software
13; Foundation, in version 2 as it comes in the "COPYING" file of the
14; VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15; hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16;
17; Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
18; Clara, CA 95054 USA or visit http://www.sun.com if you need
19; additional information or have any questions.
20;
21
22;*******************************************************************************
23;* Header Files *
24;*******************************************************************************
25%include "VBox/asmdefs.mac"
26%include "../VMMInternal.mac"
27%include "iprt/err.mac"
28%include "VBox/param.mac"
29
30
31;*******************************************************************************
32;* Defined Constants And Macros *
33;*******************************************************************************
34%define RESUME_MAGIC 07eadf00dh
35%define STACK_PADDING 0eeeeeeeeh
36
37
38; For vmmR0LoggerWrapper. (The other architecture(s) use(s) C99 variadic macros.)
39extern NAME(RTLogLogger)
40
41
42BEGINCODE
43
44
45;;
46; The setjmp variant used for calling Ring-3.
47;
48; This differs from the normal setjmp in that it will resume VMMRZCallRing3 if we're
49; in the middle of a ring-3 call. Another differences is the function pointer and
50; argument. This has to do with resuming code and the stack frame of the caller.
51;
52; @returns VINF_SUCCESS on success or whatever is passed to vmmR0CallRing3LongJmp.
53; @param pJmpBuf msc:rcx gcc:rdi x86:[esp+0x04] Our jmp_buf.
54; @param pfn msc:rdx gcc:rsi x86:[esp+0x08] The function to be called when not resuming.
55; @param pvUser1 msc:r8 gcc:rdx x86:[esp+0x0c] The argument of that function.
56; @param pvUser2 msc:r9 gcc:rcx x86:[esp+0x10] The argument of that function.
57;
BEGINPROC vmmR0CallRing3SetJmp
GLOBALNAME vmmR0CallRing3SetJmpEx
    ;
    ; Save the caller's register state into the jump buffer (cdecl: only
    ; EBX/ESI/EDI/EBP are callee-saved and need preserving).
    ;
    mov     edx, [esp + 4h]                     ; pJmpBuf
    mov     [xDX + VMMR0JMPBUF.ebx], ebx
    mov     [xDX + VMMR0JMPBUF.esi], esi
    mov     [xDX + VMMR0JMPBUF.edi], edi
    mov     [xDX + VMMR0JMPBUF.ebp], ebp
    mov     xAX, [esp]                          ; our return address doubles as the resume EIP
    mov     [xDX + VMMR0JMPBUF.eip], xAX
    lea     ecx, [esp + 4]                      ; caller ESP, i.e. just above the return address (used in resume)
    mov     [xDX + VMMR0JMPBUF.esp], ecx
    pushf
    pop     xAX
    mov     [xDX + VMMR0JMPBUF.eflags], xAX

    ;
    ; If we're not in a ring-3 call, call pfn and return.
    ;
    test    byte [xDX + VMMR0JMPBUF.fInRing3Call], 1
    jnz     .resume

    mov     ebx, edx                            ; pJmpBuf -> ebx (persistent reg)
%ifdef VMM_R0_SWITCH_STACK
    mov     esi, [ebx + VMMR0JMPBUF.pvSavedStack]
    test    esi, esi
    jz      .entry_error
 %ifdef VBOX_STRICT
    cmp     dword [esi], 0h                     ; bottom dword must have been reset by the previous user.
    jne     .entry_error
    mov     edx, esi
    mov     edi, esi
    mov     ecx, VMM_STACK_SIZE / 4
    mov     eax, STACK_PADDING
    rep stosd                                   ; fill the stack with padding. (Fix: REPNE is only defined
                                                ; for SCAS/CMPS; REP is the correct prefix for STOS.)
 %endif
    lea     esi, [esi + VMM_STACK_SIZE - 32]    ; esi = top of the switched stack, minus marker area.
    mov     [esi + 1ch], dword 0deadbeefh       ; Marker 1.
    mov     [esi + 18h], ebx                    ; Save pJmpBuf pointer.
    mov     [esi + 14h], dword 00c00ffeeh       ; Marker 2.
    mov     [esi + 10h], dword 0f00dbeefh       ; Marker 3.
    mov     edx, [esp + 10h]                    ; pvArg2
    mov     ecx, [esp + 0ch]                    ; pvArg1
    mov     eax, [esp + 08h]                    ; pfn
%if 1 ; Use this to eat of some extra stack - handy for finding paths using lots of stack.
 %define FRAME_OFFSET 0
%else
 %define FRAME_OFFSET 1024
%endif
    mov     [esi - FRAME_OFFSET + 04h], edx     ; 2nd argument for pfn.
    mov     [esi - FRAME_OFFSET      ], ecx     ; 1st argument for pfn.
    lea     esp, [esi - FRAME_OFFSET]           ; Switch stack!
    call    eax
    and     dword [esi + 1ch], byte 0           ; reset marker.

 %ifdef VBOX_STRICT
    ; Calc stack usage and check for overflows.
    mov     edi, [ebx + VMMR0JMPBUF.pvSavedStack]
    cmp     dword [edi], STACK_PADDING          ; Check for obvious stack overflow.
    jne     .stack_overflow
    mov     esi, eax                            ; save eax (pfn return value)
    mov     eax, STACK_PADDING
    mov     ecx, VMM_STACK_SIZE / 4
    cld
    repe scasd                                  ; scan upwards for the first non-padding dword.
    shl     ecx, 2                              ; *4: ecx = untouched bytes => stack headroom.
    cmp     ecx, VMM_STACK_SIZE - 64            ; Less than 64 bytes left -> overflow as well.
    mov     eax, esi                            ; restore eax in case of overflow (esi remains used)
    jae     .stack_overflow_almost

    ; Update stack usage statistics.
    cmp     ecx, [ebx + VMMR0JMPBUF.cbUsedMax]  ; New max usage?
    jle     .no_used_max
    mov     [ebx + VMMR0JMPBUF.cbUsedMax], ecx
.no_used_max:
    ; To simplify the average stuff, just historize before we hit div errors.
    inc     dword [ebx + VMMR0JMPBUF.cUsedTotal]
    test    [ebx + VMMR0JMPBUF.cUsedTotal], dword 0c0000000h
    jz      .no_historize
    mov     dword [ebx + VMMR0JMPBUF.cUsedTotal], 2
    mov     edi, [ebx + VMMR0JMPBUF.cbUsedAvg]
    mov     [ebx + VMMR0JMPBUF.cbUsedTotal], edi
    mov     dword [ebx + VMMR0JMPBUF.cbUsedTotal + 4], 0
.no_historize:
    add     [ebx + VMMR0JMPBUF.cbUsedTotal], ecx
    adc     dword [ebx + VMMR0JMPBUF.cbUsedTotal + 4], 0
    mov     eax, [ebx + VMMR0JMPBUF.cbUsedTotal]
    mov     edx, [ebx + VMMR0JMPBUF.cbUsedTotal + 4]
    mov     edi, [ebx + VMMR0JMPBUF.cUsedTotal]
    div     edi                                 ; edx:eax / edi -> new average in eax.
    mov     [ebx + VMMR0JMPBUF.cbUsedAvg], eax

    mov     eax, esi                            ; restore eax (final, esi released)

    mov     edi, [ebx + VMMR0JMPBUF.pvSavedStack]
    mov     dword [edi], 0h                     ; Reset the overflow marker.
 %endif ; VBOX_STRICT

%else  ; !VMM_R0_SWITCH_STACK
    mov     ecx, [esp + 0ch]                    ; pvArg1
    mov     edx, [esp + 10h]                    ; pvArg2
    mov     eax, [esp + 08h]                    ; pfn
    sub     esp, 12                             ; align the stack on a 16-byte boundrary.
    mov     [esp      ], ecx
    mov     [esp + 04h], edx
    call    eax
%endif ; !VMM_R0_SWITCH_STACK
    mov     edx, ebx                            ; pJmpBuf -> edx (volatile reg)

    ;
    ; Return like in the long jump but clear eip, no short cuts here.
    ;
.proper_return:
    mov     ebx, [xDX + VMMR0JMPBUF.ebx]
    mov     esi, [xDX + VMMR0JMPBUF.esi]
    mov     edi, [xDX + VMMR0JMPBUF.edi]
    mov     ebp, [xDX + VMMR0JMPBUF.ebp]
    mov     xCX, [xDX + VMMR0JMPBUF.eip]
    and     dword [xDX + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
    mov     esp, [xDX + VMMR0JMPBUF.esp]
    push    dword [xDX + VMMR0JMPBUF.eflags]
    popf
    jmp     xCX

.entry_error:
    mov     eax, VERR_INTERNAL_ERROR_2
    jmp     .proper_return

.stack_overflow:
    mov     eax, VERR_INTERNAL_ERROR_5
    mov     edx, ebx
    jmp     .proper_return

.stack_overflow_almost:
    mov     eax, VERR_INTERNAL_ERROR
    mov     edx, ebx
    jmp     .proper_return

    ;
    ; Aborting resume.
    ;
.bad:
    and     dword [xDX + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
    mov     edi, [xDX + VMMR0JMPBUF.edi]
    mov     esi, [xDX + VMMR0JMPBUF.esi]
    mov     ebx, [xDX + VMMR0JMPBUF.ebx]
    mov     eax, VERR_INTERNAL_ERROR_3          ; todo better return code!
    ret

    ;
    ; Resume VMMRZCallRing3 the call.
    ;
.resume:
    ; Sanity checks.
%ifdef VMM_R0_SWITCH_STACK
    mov     eax, [xDX + VMMR0JMPBUF.pvSavedStack]
 %ifdef VBOX_STRICT
    ; The bottom sentinel dword is written (VBOX_STRICT only) at entry above and
    ; is only overwritten by an overflowing stack.
    ; Fix: the compare result was previously unused (no conditional jump), and the
    ; check was guarded by RT_STRICT although the padding fill is VBOX_STRICT only.
    cmp     dword [eax], STACK_PADDING
    jne     .bad
 %endif
    lea     eax, [eax + VMM_STACK_SIZE - 32]    ; eax = marker area at the top of the saved stack.
    cmp     dword [eax + 1ch], 0deadbeefh       ; Marker 1.
    jne     .bad
 %ifdef RT_STRICT
    ; Fix: these checks used [esi + ...], but ESI holds the (arbitrary) caller
    ; value in this path; the marker base computed above lives in EAX.
    cmp     [eax + 18h], edx                    ; The saved pJmpBuf pointer.
    jne     .bad
    cmp     dword [eax + 14h], 00c00ffeeh       ; Marker 2.
    jne     .bad
    cmp     dword [eax + 10h], 0f00dbeefh       ; Marker 3.
    jne     .bad
 %endif
%else  ; !VMM_R0_SWITCH_STACK
    cmp     ecx, [xDX + VMMR0JMPBUF.SpCheck]    ; ecx = entry ESP (computed above).
    jne     .bad
.espCheck_ok:
    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
    cmp     ecx, VMM_STACK_SIZE                 ; saved size must be sane...
    ja      .bad
    test    ecx, 3                              ; ...and dword aligned...
    jnz     .bad
    mov     edi, [xDX + VMMR0JMPBUF.esp]
    sub     edi, [xDX + VMMR0JMPBUF.SpResume]
    cmp     ecx, edi                            ; ...and match the esp/SpResume distance.
    jne     .bad
%endif

%ifdef VMM_R0_SWITCH_STACK
    ; Switch stack.
    mov     esp, [xDX + VMMR0JMPBUF.SpResume]
%else
    ; Restore the stack.
    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
    shr     ecx, 2
    mov     esi, [xDX + VMMR0JMPBUF.pvSavedStack]
    mov     edi, [xDX + VMMR0JMPBUF.SpResume]
    mov     esp, edi
    rep movsd                                   ; copy the saved stack back into place.
%endif ; !VMM_R0_SWITCH_STACK
    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 0

    ;
    ; Continue where we left off (stack layout produced by vmmR0CallRing3LongJmp).
    ;
%ifdef VBOX_STRICT
    pop     eax                                 ; magic
    cmp     eax, RESUME_MAGIC
    je      .magic_ok
    mov     ecx, 0123h                          ; deliberate crash: bad magic means a corrupt resume stack.
    mov     [ecx], edx
.magic_ok:
%endif
    popf
    pop     ebx
    pop     esi
    pop     edi
    pop     ebp
    xor     eax, eax                            ; VINF_SUCCESS
    ret
ENDPROC vmmR0CallRing3SetJmp
278
279
280;;
281; Worker for VMMRZCallRing3.
282; This will save the stack and registers.
283;
284; @param pJmpBuf msc:rcx gcc:rdi x86:[ebp+8] Pointer to the jump buffer.
285; @param rc msc:rdx gcc:rsi x86:[ebp+c] The return code.
286;
BEGINPROC vmmR0CallRing3LongJmp
    ;
    ; Save the registers on the stack.  This exact push order is the resume
    ; stack layout consumed by vmmR0CallRing3SetJmp's .resume path - do not
    ; reorder one without the other.
    ;
    push    ebp
    mov     ebp, esp
    push    edi
    push    esi
    push    ebx
    pushf
%ifdef VBOX_STRICT
    push    RESUME_MAGIC                        ; sentinel checked on resume to catch stack corruption.
%endif

    ;
    ; Load parameters.
    ;
    mov     edx, [ebp + 08h]                    ; pJmpBuf
    mov     eax, [ebp + 0ch]                    ; rc

    ;
    ; Is the jump buffer armed?  (eip is cleared once a buffer is consumed.)
    ;
    cmp     dword [xDX + VMMR0JMPBUF.eip], byte 0
    je      .nok

    ;
    ; Sanity checks.
    ;
    mov     edi, [xDX + VMMR0JMPBUF.pvSavedStack]
    test    edi, edi                            ; darwin may set this to 0.
    jz      .nok
    mov     [xDX + VMMR0JMPBUF.SpResume], esp   ; remember where to resume (just-pushed frame).
%ifndef VMM_R0_SWITCH_STACK
    mov     esi, esp                            ; esi = copy source (current stack top).
    mov     ecx, [xDX + VMMR0JMPBUF.esp]
    sub     ecx, esi                            ; ecx = bytes between SetJmp's esp and here.

    ; two sanity checks on the size.
    cmp     ecx, VMM_STACK_SIZE                 ; check max size.
    jnbe    .nok

    ;
    ; Copy the stack.  edi (pvSavedStack, loaded above) is the destination.
    ; NOTE(review): DF is assumed clear (no cld) - presumably guaranteed by the
    ; ring-0 calling environment; confirm for new ports.
    ;
    test    ecx, 3                              ; check alignment
    jnz     .nok
    mov     [xDX + VMMR0JMPBUF.cbSavedStack], ecx
    shr     ecx, 2                              ; byte count -> dword count for movsd.
    rep movsd
%endif ; !VMM_R0_SWITCH_STACK

    ; Save ESP & EBP to enable stack dumps
    mov     ecx, ebp
    mov     [xDX + VMMR0JMPBUF.SavedEbp], ecx
    sub     ecx, 4                              ; point at the saved edi slot below the frame.
    mov     [xDX + VMMR0JMPBUF.SavedEsp], ecx

    ; store the last pieces of info.
    mov     ecx, [xDX + VMMR0JMPBUF.esp]
    mov     [xDX + VMMR0JMPBUF.SpCheck], ecx    ; re-checked by SetJmp on resume.
    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 1

    ;
    ; Do the long jump: restore SetJmp's callee-saved state, flags and stack,
    ; then jump to the saved eip.  rc stays in eax as the SetJmp return value.
    ;
    mov     ebx, [xDX + VMMR0JMPBUF.ebx]
    mov     esi, [xDX + VMMR0JMPBUF.esi]
    mov     edi, [xDX + VMMR0JMPBUF.edi]
    mov     ebp, [xDX + VMMR0JMPBUF.ebp]
    mov     ecx, [xDX + VMMR0JMPBUF.eip]
    mov     esp, [xDX + VMMR0JMPBUF.esp]
    push    dword [xDX + VMMR0JMPBUF.eflags]
    popf
    jmp     ecx

    ;
    ; Failure
    ;
.nok:
%ifdef VBOX_STRICT
    pop     eax                                 ; magic
    cmp     eax, RESUME_MAGIC
    je      .magic_ok
    mov     ecx, 0123h                          ; deliberate crash on bad magic (writes edx to a bogus address).
    mov     [ecx], edx
.magic_ok:
%endif
    popf
    pop     ebx
    pop     esi
    pop     edi
    mov     eax, VERR_INTERNAL_ERROR_4
    leave
    ret
ENDPROC vmmR0CallRing3LongJmp
383
384
385;;
386; Internal R0 logger worker: Logger wrapper.
387;
388; @cproto VMMR0DECL(void) vmmR0LoggerWrapper(const char *pszFormat, ...)
389;
EXPORTEDNAME vmmR0LoggerWrapper
        push    0                               ; pLogger = NULL -> RTLogLogger uses the default instance.
        call    NAME(RTLogLogger)
        add     esp, byte 4                     ; cdecl: we pop our single argument ourselves.
        ret
ENDPROC vmmR0LoggerWrapper
396
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette