VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMR0/VMMR0A.asm@ 20534

Last change on this file since 20534 was 20534, checked in by vboxsync, 15 years ago

VMMR0A.asm: More sanity checks.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 16.7 KB
; $Id: VMMR0A.asm 20534 2009-06-13 20:58:04Z vboxsync $
;; @file
; VMM - R0 assembly routines.
;

;
; Copyright (C) 2006-2007 Sun Microsystems, Inc.
;
; This file is part of VirtualBox Open Source Edition (OSE), as
; available from http://www.virtualbox.org. This file is free software;
; you can redistribute it and/or modify it under the terms of the GNU
; General Public License (GPL) as published by the Free Software
; Foundation, in version 2 as it comes in the "COPYING" file of the
; VirtualBox OSE distribution. VirtualBox OSE is distributed in the
; hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
;
; Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
; Clara, CA 95054 USA or visit http://www.sun.com if you need
; additional information or have any questions.
;

;*******************************************************************************
;*  Header Files                                                               *
;*******************************************************************************
%include "VBox/asmdefs.mac"
%include "VMMInternal.mac"
%include "iprt/err.mac"


%ifdef RT_ARCH_X86 ; The other architecture(s) use(s) C99 variadic macros.
extern NAME(RTLogLogger)
%endif

%ifdef RT_OS_DARWIN
 %define VMM_R0_SWITCH_STACK
%endif
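; When VMM_R0_SWITCH_STACK is defined, vmmR0CallHostSetJmp below runs the
; callee on the pre-allocated stack pointed to by VMMR0JMPBUF.pvSavedStack
; and a ring-3 call merely records the resume stack pointer; without it, the
; used part of the kernel stack is copied into pvSavedStack by the long jump
; and copied back on resume.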


BEGINCODE


;;
; The setjmp variant used for calling Ring-3.
;
; This differs from the normal setjmp in that it will resume VMMR0CallHost if we're
; in the middle of a ring-3 call. Another difference is the function pointer and
; argument. This has to do with resuming code and the stack frame of the caller.
;
; @returns  VINF_SUCCESS on success or whatever is passed to vmmR0CallHostLongJmp.
; @param    pJmpBuf   msc:rcx gcc:rdi x86:[esp+0x04]  Our jmp_buf.
; @param    pfn       msc:rdx gcc:rsi x86:[esp+0x08]  The function to be called when not resuming.
; @param    pvUser1   msc:r8  gcc:rdx x86:[esp+0x0c]  The argument of that function.
; @param    pvUser2   msc:r9  gcc:rcx x86:[esp+0x10]  The argument of that function.
;
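;
; A rough sketch of the presumed C-side declaration, mirroring the @param list
; above (the DECLASM macro and parameter names here are illustrative, not
; lifted from VMMInternal.h):
;
;   DECLASM(int) vmmR0CallHostSetJmp(PVMMR0JMPBUF pJmpBuf,
;                                    int (*pfn)(void *pvUser1, void *pvUser2),
;                                    void *pvUser1, void *pvUser2);
;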
BEGINPROC vmmR0CallHostSetJmp
GLOBALNAME vmmR0CallHostSetJmpEx
%ifdef RT_ARCH_X86
        ;
        ; Save the registers.
        ;
        mov     edx, [esp + 4h]                 ; pJmpBuf
        mov     [edx + VMMR0JMPBUF.ebx], ebx
        mov     [edx + VMMR0JMPBUF.esi], esi
        mov     [edx + VMMR0JMPBUF.edi], edi
        mov     [edx + VMMR0JMPBUF.ebp], ebp
        mov     eax, [esp]
        mov     [edx + VMMR0JMPBUF.eip], eax
        lea     ecx, [esp + 4]                  ; (used in resume)
        mov     [edx + VMMR0JMPBUF.esp], ecx

        ;
        ; If we're not in a ring-3 call, call pfn and return.
        ;
        test    byte [edx + VMMR0JMPBUF.fInRing3Call], 1
        jnz     .resume

        mov     ebx, edx                        ; pJmpBuf -> ebx (persistent reg)
%ifdef VMM_R0_SWITCH_STACK
        mov     esi, [ebx + VMMR0JMPBUF.pvSavedStack]
        test    esi, esi
        jz      .entry_error
 %ifdef VBOX_STRICT
        cmp     dword [esi], 0h
        jne     .entry_error
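        ; Poison the whole 8 KB private stack with 0eeeeeeeeh; the dword at the
        ; bottom doubles as the in-use marker (checked above, reset after the
        ; call below), and the spot checks after the call use the pattern to
        ; detect a stack overflow.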
        mov     edx, esi
        mov     edi, esi
        mov     ecx, 2048
        mov     eax, 0eeeeeeeeh
        rep stosd
 %endif
        lea     esi, [esi + 8192 - 32]
        mov     [esi + 1ch], dword 0deadbeefh   ; Marker 1.
        mov     [esi + 18h], ebx                ; Save pJmpBuf pointer.
        mov     [esi + 14h], dword 00c00ffeeh   ; Marker 2.
        mov     [esi + 10h], dword 0f00dbeefh   ; Marker 3.
        mov     edx, [esp + 10h]                ; pvArg2
        mov     [esi + 04h], edx
        mov     ecx, [esp + 0ch]                ; pvArg1
        mov     [esi], ecx
        mov     eax, [esp + 08h]                ; pfn
        mov     esp, esi                        ; Switch stack!
        call    eax
        and     dword [esi + 1ch], byte 0       ; clear marker.

 %ifdef VBOX_STRICT
        mov     esi, [ebx + VMMR0JMPBUF.pvSavedStack]
        cmp     dword [esi], 0eeeeeeeeh         ; Check for stack overflow
        jne     .stack_overflow
        cmp     dword [esi + 04h], 0eeeeeeeeh
        jne     .stack_overflow
        cmp     dword [esi + 08h], 0eeeeeeeeh
        jne     .stack_overflow
        cmp     dword [esi + 0ch], 0eeeeeeeeh
        jne     .stack_overflow
        cmp     dword [esi + 10h], 0eeeeeeeeh
        jne     .stack_overflow
        cmp     dword [esi + 20h], 0eeeeeeeeh
        jne     .stack_overflow
        cmp     dword [esi + 30h], 0eeeeeeeeh
        jne     .stack_overflow
        mov     dword [esi], 0h                 ; Reset the marker
 %endif

%else ; !VMM_R0_SWITCH_STACK
        mov     ecx, [esp + 0ch]                ; pvArg1
        mov     edx, [esp + 10h]                ; pvArg2
        mov     eax, [esp + 08h]                ; pfn
        sub     esp, 12                         ; align the stack on a 16-byte boundary.
        mov     [esp], ecx
        mov     [esp + 04h], edx
        call    eax
%endif ; !VMM_R0_SWITCH_STACK
        mov     edx, ebx                        ; pJmpBuf -> edx (volatile reg)

        ;
        ; Return like in the long jump but clear eip, no shortcuts here.
        ;
.proper_return:
        mov     ebx, [edx + VMMR0JMPBUF.ebx]
        mov     esi, [edx + VMMR0JMPBUF.esi]
        mov     edi, [edx + VMMR0JMPBUF.edi]
        mov     ebp, [edx + VMMR0JMPBUF.ebp]
        mov     ecx, [edx + VMMR0JMPBUF.eip]
        and     dword [edx + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
        mov     esp, [edx + VMMR0JMPBUF.esp]
        jmp     ecx

.entry_error:
        mov     eax, VERR_INTERNAL_ERROR_2
        jmp     .proper_return

.stack_overflow:
        mov     eax, VERR_INTERNAL_ERROR_5
        jmp     .proper_return

        ;
        ; Aborting resume.
        ;
.bad:
        and     dword [edx + VMMR0JMPBUF.eip], byte 0 ; used for valid check.
        mov     edi, [edx + VMMR0JMPBUF.edi]
        mov     esi, [edx + VMMR0JMPBUF.esi]
        mov     ebx, [edx + VMMR0JMPBUF.ebx]
        mov     eax, VERR_INTERNAL_ERROR_3      ; TODO: better return code!
        ret

        ;
        ; Resume the VMMR0CallHost call.
        ;
.resume:
        ; Sanity checks.
%ifdef VMM_R0_SWITCH_STACK
        mov     eax, [edx + VMMR0JMPBUF.pvSavedStack]
 %ifdef RT_STRICT
        cmp     dword [eax], 0eeeeeeeeh
 %endif
        lea     eax, [eax + 8192 - 32]
        cmp     dword [eax + 1ch], 0deadbeefh   ; Marker 1.
        jne     .bad
 %ifdef RT_STRICT
        cmp     [eax + 18h], edx                ; The saved pJmpBuf pointer.
        jne     .bad
        cmp     dword [eax + 14h], 00c00ffeeh   ; Marker 2.
        jne     .bad
        cmp     dword [eax + 10h], 0f00dbeefh   ; Marker 3.
        jne     .bad
 %endif
%else ; !VMM_R0_SWITCH_STACK
        cmp     ecx, [edx + VMMR0JMPBUF.SpCheck]
        jne     .bad
.espCheck_ok:
        mov     ecx, [edx + VMMR0JMPBUF.cbSavedStack]
        cmp     ecx, 8192
        ja      .bad
        test    ecx, 3
        jnz     .bad
        mov     edi, [edx + VMMR0JMPBUF.esp]
        sub     edi, [edx + VMMR0JMPBUF.SpResume]
        cmp     ecx, edi
        jne     .bad
%endif

%ifdef VMM_R0_SWITCH_STACK
        ; Switch stack.
        mov     esp, [edx + VMMR0JMPBUF.SpResume]
%else
        ; Restore the stack.
        mov     ecx, [edx + VMMR0JMPBUF.cbSavedStack]
        shr     ecx, 2
        mov     esi, [edx + VMMR0JMPBUF.pvSavedStack]
        mov     edi, [edx + VMMR0JMPBUF.SpResume]
        mov     esp, edi
        rep movsd
%endif ; !VMM_R0_SWITCH_STACK
        mov     byte [edx + VMMR0JMPBUF.fInRing3Call], 0

        ; Continue where we left off.
%ifdef VBOX_STRICT
        pop     eax                             ; magic
        cmp     eax, 0f00dbed0h
        je      .magic_ok
        mov     ecx, 0123h
        mov     [ecx], edx
.magic_ok:
%endif
        popf
        pop     ebx
        pop     esi
        pop     edi
        pop     ebp
        xor     eax, eax                        ; VINF_SUCCESS
        ret
%endif ; RT_ARCH_X86

%ifdef RT_ARCH_AMD64
        ;
        ; Save the registers.
        ;
        push    rbp
        mov     rbp, rsp
 %ifdef ASM_CALL64_MSC
        sub     rsp, 30h
        mov     r11, rdx                        ; pfn
        mov     rdx, rcx                        ; pJmpBuf
 %else
        sub     rsp, 10h
        mov     r8, rdx                         ; pvUser1 (save it like MSC)
        mov     r9, rcx                         ; pvUser2 (save it like MSC)
        mov     r11, rsi                        ; pfn
        mov     rdx, rdi                        ; pJmpBuf
 %endif
        mov     [rdx + VMMR0JMPBUF.rbx], rbx
 %ifdef ASM_CALL64_MSC
        mov     [rdx + VMMR0JMPBUF.rsi], rsi
        mov     [rdx + VMMR0JMPBUF.rdi], rdi
 %endif
        mov     r10, [rbp]
        mov     [rdx + VMMR0JMPBUF.rbp], r10
        mov     [rdx + VMMR0JMPBUF.r12], r12
        mov     [rdx + VMMR0JMPBUF.r13], r13
        mov     [rdx + VMMR0JMPBUF.r14], r14
        mov     [rdx + VMMR0JMPBUF.r15], r15
        mov     rax, [rbp + 8]
        mov     [rdx + VMMR0JMPBUF.rip], rax
        lea     r10, [rbp + 10h]                ; (used in resume)
        mov     [rdx + VMMR0JMPBUF.rsp], r10

        ;
        ; If we're not in a ring-3 call, call pfn and return.
        ;
        test    byte [rdx + VMMR0JMPBUF.fInRing3Call], 1
        jnz     .resume

 %ifdef VMM_R0_SWITCH_STACK
        mov     r15, [rdx + VMMR0JMPBUF.pvSavedStack]
        test    r15, r15
        jz      .entry_error
  %ifdef VBOX_STRICT
        cmp     dword [r15], 0h
        jne     .entry_error
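        ; Same poisoning as on x86: fill the 8 KB private stack with a
        ; recognisable pattern; the dword at the bottom doubles as the in-use
        ; marker that is checked above and reset after the call below.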
        mov     rdi, r15
        mov     rcx, 1024
        mov     rax, 00eeeeeeeffeeeeeeeh
        rep stosq
        mov     [rdi - 10h], rbx
  %endif
        lea     r15, [r15 + 8192 - 40h]
        mov     rsp, r15                        ; Switch stack!
 %endif ; VMM_R0_SWITCH_STACK

        mov     r12, rdx                        ; Save pJmpBuf.
 %ifdef ASM_CALL64_MSC
        mov     rcx, r8                         ; pvUser -> arg0
        mov     rdx, r9
 %else
        mov     rdi, r8                         ; pvUser -> arg0
        mov     rsi, r9
 %endif
        call    r11
        mov     rdx, r12                        ; Restore pJmpBuf

 %ifdef VMM_R0_SWITCH_STACK
  %ifdef VBOX_STRICT
        mov     r15, [rdx + VMMR0JMPBUF.pvSavedStack]
        mov     dword [r15], 0h                 ; Reset the marker
  %endif
 %endif

        ;
        ; Return like in the long jump but clear rip, no shortcuts here.
        ;
.proper_return:
        mov     rbx, [rdx + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
        mov     rsi, [rdx + VMMR0JMPBUF.rsi]
        mov     rdi, [rdx + VMMR0JMPBUF.rdi]
 %endif
        mov     r12, [rdx + VMMR0JMPBUF.r12]
        mov     r13, [rdx + VMMR0JMPBUF.r13]
        mov     r14, [rdx + VMMR0JMPBUF.r14]
        mov     r15, [rdx + VMMR0JMPBUF.r15]
        mov     rbp, [rdx + VMMR0JMPBUF.rbp]
        mov     rcx, [rdx + VMMR0JMPBUF.rip]
        and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
        mov     rsp, [rdx + VMMR0JMPBUF.rsp]
        jmp     rcx

.entry_error:
        mov     eax, VERR_INTERNAL_ERROR_2
        jmp     .proper_return

        ;
        ; Resume the VMMR0CallHost call.
        ;
.resume:
 %ifdef VMM_R0_SWITCH_STACK
        ; Switch stack.
        mov     rsp, [rdx + VMMR0JMPBUF.SpResume]
 %else ; !VMM_R0_SWITCH_STACK
        ; Sanity checks.
        cmp     r10, [rdx + VMMR0JMPBUF.SpCheck]
        je      .rspCheck_ok
.bad:
        and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
        mov     rbx, [rdx + VMMR0JMPBUF.rbx]
  %ifdef ASM_CALL64_MSC
        mov     rsi, [rdx + VMMR0JMPBUF.rsi]
        mov     rdi, [rdx + VMMR0JMPBUF.rdi]
  %endif
        mov     r12, [rdx + VMMR0JMPBUF.r12]
        mov     r13, [rdx + VMMR0JMPBUF.r13]
        mov     r14, [rdx + VMMR0JMPBUF.r14]
        mov     r15, [rdx + VMMR0JMPBUF.r15]
        mov     eax, VERR_INTERNAL_ERROR_2
        leave
        ret

.rspCheck_ok:
        mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
        cmp     rcx, 8192
        ja      .bad
        test    rcx, 3
        jnz     .bad
        mov     rdi, [rdx + VMMR0JMPBUF.rsp]
        sub     rdi, [rdx + VMMR0JMPBUF.SpResume]
        cmp     rcx, rdi
        jne     .bad

        ;
        ; Restore the stack.
        ;
        mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
        shr     ecx, 3
        mov     rsi, [rdx + VMMR0JMPBUF.pvSavedStack]
        mov     rdi, [rdx + VMMR0JMPBUF.SpResume]
        mov     rsp, rdi
        rep movsq
 %endif ; !VMM_R0_SWITCH_STACK
        mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 0

        ;
        ; Continue where we left off.
        ;
        popf
        pop     rbx
 %ifdef ASM_CALL64_MSC
        pop     rsi
        pop     rdi
 %endif
        pop     r12
        pop     r13
        pop     r14
        pop     r15
        pop     rbp
        xor     eax, eax                        ; VINF_SUCCESS
        ret
%endif
ENDPROC vmmR0CallHostSetJmp


;;
; Worker for VMMR0CallHost.
; This will save the stack and registers.
;
; @param    pJmpBuf   msc:rcx gcc:rdi x86:[ebp+08h]  Pointer to the jump buffer.
; @param    rc        msc:rdx gcc:rsi x86:[ebp+0ch]  The return code.
;
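;
; Presumed use from the C side (illustrative only, not copied from VMMR0.cpp):
; VMMR0CallHost() records which operation ring-3 should perform and then
; unwinds back into the vmmR0CallHostSetJmp frame with a status code, e.g.:
;
;   return vmmR0CallHostLongJmp(pJmpBuf, VINF_VMM_CALL_HOST);
;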
BEGINPROC vmmR0CallHostLongJmp
%ifdef RT_ARCH_X86
        ;
        ; Save the registers on the stack.
        ;
        push    ebp
        mov     ebp, esp
        push    edi
        push    esi
        push    ebx
        pushf
%ifdef VBOX_STRICT
        push    dword 0f00dbed0h
%endif

        ;
        ; Load parameters.
        ;
        mov     edx, [ebp + 08h]                ; pJmpBuf
        mov     eax, [ebp + 0ch]                ; rc

        ;
        ; Is the jump buffer armed?
        ;
        cmp     dword [edx + VMMR0JMPBUF.eip], byte 0
        je      .nok

        ;
        ; Sanity checks.
        ;
        mov     edi, [edx + VMMR0JMPBUF.pvSavedStack]
        test    edi, edi                        ; darwin may set this to 0.
        jz      .nok
        mov     [edx + VMMR0JMPBUF.SpResume], esp
%ifndef VMM_R0_SWITCH_STACK
        mov     esi, esp
        mov     ecx, [edx + VMMR0JMPBUF.esp]
        sub     ecx, esi

        ; two sanity checks on the size.
        cmp     ecx, 8192                       ; check max size.
        jnbe    .nok

        ;
        ; Copy the stack.
        ;
        test    ecx, 3                          ; check alignment
        jnz     .nok
        mov     [edx + VMMR0JMPBUF.cbSavedStack], ecx
        shr     ecx, 2
        rep movsd
%endif ; !VMM_R0_SWITCH_STACK

        ; Save ESP & EBP to enable stack dumps
        mov     ecx, ebp
        mov     [edx + VMMR0JMPBUF.SavedEbp], ecx
        sub     ecx, 4
        mov     [edx + VMMR0JMPBUF.SavedEsp], ecx

        ; store the last pieces of info.
        mov     ecx, [edx + VMMR0JMPBUF.esp]
        mov     [edx + VMMR0JMPBUF.SpCheck], ecx
        mov     byte [edx + VMMR0JMPBUF.fInRing3Call], 1

        ;
        ; Do the long jump.
        ;
        mov     ebx, [edx + VMMR0JMPBUF.ebx]
        mov     esi, [edx + VMMR0JMPBUF.esi]
        mov     edi, [edx + VMMR0JMPBUF.edi]
        mov     ebp, [edx + VMMR0JMPBUF.ebp]
        mov     ecx, [edx + VMMR0JMPBUF.eip]
        mov     esp, [edx + VMMR0JMPBUF.esp]
        jmp     ecx

        ;
        ; Failure
        ;
.nok:
%ifdef VBOX_STRICT
        pop     eax                             ; magic
        cmp     eax, 0f00dbed0h
        je      .magic_ok
        mov     ecx, 0123h
        mov     [ecx], edx
.magic_ok:
%endif
        popf
        pop     ebx
        pop     esi
        pop     edi
        mov     eax, VERR_INTERNAL_ERROR_4
        leave
        ret
%endif ; RT_ARCH_X86

%ifdef RT_ARCH_AMD64
        ;
        ; Save the registers on the stack.
        ;
        push    rbp
        mov     rbp, rsp
        push    r15
        push    r14
        push    r13
        push    r12
 %ifdef ASM_CALL64_MSC
        push    rdi
        push    rsi
 %endif
        push    rbx
        pushf

        ;
        ; Normalize the parameters.
        ;
 %ifdef ASM_CALL64_MSC
        mov     eax, edx                        ; rc
        mov     rdx, rcx                        ; pJmpBuf
 %else
        mov     rdx, rdi                        ; pJmpBuf
        mov     eax, esi                        ; rc
 %endif

        ;
        ; Is the jump buffer armed?
        ;
        cmp     qword [rdx + VMMR0JMPBUF.rip], byte 0
        je      .nok

        ;
        ; Sanity checks.
        ;
        mov     rdi, [rdx + VMMR0JMPBUF.pvSavedStack]
        test    rdi, rdi                        ; darwin may set this to 0.
        jz      .nok
        mov     [rdx + VMMR0JMPBUF.SpResume], rsp
 %ifndef VMM_R0_SWITCH_STACK
        mov     rsi, rsp
        mov     rcx, [rdx + VMMR0JMPBUF.rsp]
        sub     rcx, rsi

        ; two sanity checks on the size.
        cmp     rcx, 8192                       ; check max size.
        jnbe    .nok

        ;
        ; Copy the stack.
        ;
        test    ecx, 7                          ; check alignment
        jnz     .nok
        mov     [rdx + VMMR0JMPBUF.cbSavedStack], ecx
        shr     ecx, 3
        rep movsq

 %endif ; !VMM_R0_SWITCH_STACK

        ; Save RSP & RBP to enable stack dumps
        mov     rcx, rbp
        mov     [rdx + VMMR0JMPBUF.SavedEbp], rcx
        sub     rcx, 8
        mov     [rdx + VMMR0JMPBUF.SavedEsp], rcx

        ; store the last pieces of info.
        mov     rcx, [rdx + VMMR0JMPBUF.rsp]
        mov     [rdx + VMMR0JMPBUF.SpCheck], rcx
        mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 1

        ;
        ; Do the long jump.
        ;
        mov     rbx, [rdx + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
        mov     rsi, [rdx + VMMR0JMPBUF.rsi]
        mov     rdi, [rdx + VMMR0JMPBUF.rdi]
 %endif
        mov     r12, [rdx + VMMR0JMPBUF.r12]
        mov     r13, [rdx + VMMR0JMPBUF.r13]
        mov     r14, [rdx + VMMR0JMPBUF.r14]
        mov     r15, [rdx + VMMR0JMPBUF.r15]
        mov     rbp, [rdx + VMMR0JMPBUF.rbp]
        mov     rcx, [rdx + VMMR0JMPBUF.rip]
        mov     rsp, [rdx + VMMR0JMPBUF.rsp]
        jmp     rcx

        ;
        ; Failure
        ;
.nok:
        mov     eax, VERR_INTERNAL_ERROR_4
        popf
        pop     rbx
 %ifdef ASM_CALL64_MSC
        pop     rsi
        pop     rdi
 %endif
        pop     r12
        pop     r13
        pop     r14
        pop     r15
        leave
        ret

%endif
ENDPROC vmmR0CallHostLongJmp


;;
; Internal R0 logger worker: Logger wrapper.
;
; @cproto VMMR0DECL(void) vmmR0LoggerWrapper(const char *pszFormat, ...)
;
EXPORTEDNAME vmmR0LoggerWrapper
%ifdef RT_ARCH_X86 ; The other architecture(s) use(s) C99 variadic macros.
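        ; The caller's (pszFormat, ...) arguments are still on the stack, so
        ; pushing a NULL logger pointer and calling RTLogLogger directly
        ; forwards them in place; the wrapper's own return address ends up in
        ; RTLogLogger's second parameter slot, which appears to be an ignored
        ; placeholder intended for exactly this kind of wrapper.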
        push    0                               ; assumes we're the wrapper for a default instance.
        call    NAME(RTLogLogger)
        add     esp, byte 4
        ret
%else
        int3
        int3
        int3
        ret
%endif
ENDPROC vmmR0LoggerWrapper