VirtualBox

source: vbox/trunk/src/recompiler_new/tcg/x86_64/tcg-target.c@ 14772

Last change on this file since 14772 was 14542, checked in by vboxsync, 16 years ago

Export new recompiler to OSE

File size: 41.5 KB
Line 
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24/*
25 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
26 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
27 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
28 * a choice of LGPL license versions is made available with the language indicating
29 * that LGPLv2 or any later version may be used, or where a choice of which version
30 * of the LGPL is applied is otherwise unspecified.
31 */
#ifndef NDEBUG
/* Human-readable register names, indexed by TCG register number;
   used only by debug/disassembly dumps. */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};
#endif
52
/* Register allocation preference order: caller-saved argument and
   scratch registers first, then the callee-saved ones (which require
   save/restore in the prologue/epilogue). */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_RAX,
    TCG_REG_R10,
    TCG_REG_R11,

    /* callee-saved */
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};
71
/* Integer argument registers, in x86_64 SysV ABI order. */
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};
80
/* Integer return-value registers (rax, then rdx for a second word). */
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX,
    TCG_REG_RDX
};
85
/* Address jumped to by INDEX_op_exit_tb to leave generated code
   (set elsewhere, presumably by the prologue generator — not visible
   in this part of the file). */
static uint8_t *tb_ret_addr;
87
/*
 * Apply a relocation to already-emitted code.
 *   code_ptr - location of the 32-bit field to patch
 *   type     - ELF relocation type (R_X86_64_32 / R_X86_64_32S / R_386_PC32)
 *   value    - target address; 'addend' is added before patching
 * Aborts if the resulting value does not fit the 32-bit field.
 */
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_X86_64_32:
        /* absolute, zero-extended: must fit in unsigned 32 bits */
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_X86_64_32S:
        /* absolute, sign-extended: must fit in signed 32 bits */
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC32:
        /* pc-relative 32-bit displacement */
        value -= (long)code_ptr;
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}
113
/* Maximum number of registers used to pass integer function arguments.
   Fixed at six by the x86_64 SysV calling convention
   (rdi, rsi, rdx, rcx, r8, r9); 'flags' is ignored on this target. */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    (void)flags;
    return 6;
}
119
/*
 * Parse one target-specific operand-constraint letter at **pct_str,
 * filling in 'ct'.  Advances *pct_str past the letter.
 * Returns 0 on success, -1 on an unknown letter.
 */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a': /* exactly %rax */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
        break;
    case 'b': /* exactly %rbx */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
        break;
    case 'c': /* exactly %rcx */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
        break;
    case 'd': /* exactly %rdx */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
        break;
    case 'S': /* exactly %rsi */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
        break;
    case 'D': /* exactly %rdi */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'q': /* the four legacy byte-addressable regs rax/rcx/rdx/rbx */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r': /* any of the 16 general-purpose registers */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        break;
    case 'L': /* qemu_ld/st constraint: any GPR except the two regs
                 clobbered by the inline TLB lookup (rsi, rdi) */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'e': /* constant fitting a sign-extended 32-bit immediate */
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'Z': /* constant fitting a zero-extended 32-bit immediate */
        ct->ct |= TCG_CT_CONST_U32;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
178
179/* test if a constant matches the constraint */
180static inline int tcg_target_const_match(tcg_target_long val,
181 const TCGArgConstraint *arg_ct)
182{
183 int ct;
184 ct = arg_ct->ct;
185 if (ct & TCG_CT_CONST)
186 return 1;
187 else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
188 return 1;
189 else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
190 return 1;
191 else
192 return 0;
193}
194
/* ALU operation selectors: the ModRM 'reg' field for the group-1
   immediate opcodes (0x81/0x83) and the low bits of the 0x00..0x38
   register-form opcodes. */
#define ARITH_ADD 0
#define ARITH_OR 1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* Shift operation selectors: ModRM 'reg' field for the shift group
   opcodes (0xc1/0xd1/0xd3). */
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

/* Condition codes for Jcc: 0x70+cc (short) or 0x0f 0x80+cc (near).
   JCC_JMP is a pseudo-value meaning an unconditional jump. */
#define JCC_JMP (-1)
#define JCC_JO 0x0
#define JCC_JNO 0x1
#define JCC_JB 0x2
#define JCC_JAE 0x3
#define JCC_JE 0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA 0x7
#define JCC_JS 0x8
#define JCC_JNS 0x9
#define JCC_JP 0xa
#define JCC_JNP 0xb
#define JCC_JL 0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG 0xf

/* Flags or'ed into the opcode word passed to tcg_out_opc(). */
#define P_EXT 0x100 /* 0x0f opcode prefix */
#define P_REXW 0x200 /* set rex.w = 1 */
#define P_REXB 0x400 /* force rex use for byte registers */
229
/* Map TCG comparison conditions to x86 condition codes: signed
   comparisons use JL/JGE/JLE/JG, unsigned ones JB/JAE/JBE/JA. */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
242
/*
 * Emit an opcode byte, preceded by a REX prefix when one is needed.
 *   opc - opcode byte or'ed with P_EXT/P_REXW/P_REXB flags
 *   r   - register that will go in the ModRM 'reg' field (bit 3 -> rex.r)
 *   rm  - register for the ModRM 'r/m' field (bit 3 -> rex.b)
 *   x   - register for the SIB 'index' field (bit 3 -> rex.x)
 */
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;
    /* Assemble rex.w (from P_REXW, opc bit 9) and rex.r/x/b (from bit 3
       of each register number) into the low nibble of the prefix. */
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) |
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    /* P_REXB forces an (possibly empty) REX prefix so that sil/dil/
       spl/bpl are addressable as byte registers. */
    if (rex || (opc & P_REXB)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc);
}
255
/* Emit opcode + ModRM byte for a register-to-register form
   (mod = 11b, reg = r, r/m = rm). */
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
261
/* Emit "push reg" (0x50+reg; REX.B added by tcg_out_opc for r8-r15). */
static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x50 + (reg & 7)), 0, reg, 0);
}
266
/* Emit "pop reg" (0x58+reg; REX.B added by tcg_out_opc for r8-r15). */
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x58 + (reg & 7)), 0, reg, 0);
}
271
272
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
/*
 * Emit opcode + ModRM (+SIB) + displacement for a memory operand
 * [rm + offset].  Picks the shortest encoding: rip-relative or
 * absolute when rm < 0, otherwise no/8-bit/32-bit displacement;
 * an SIB byte is inserted whenever r/m would be RSP.
 */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        /* displacement is relative to the end of the instruction:
           opcode bytes emitted so far + modrm + disp32 (5) plus the
           (-rm - 1) trailing immediate bytes the caller will add */
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            /* absolute 32-bit address via SIB with no base/index */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        /* mod = 00: no displacement (RBP/R13 cannot use this form) */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* r/m = RSP selects SIB; 0x24 = base RSP, no index */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        /* mod = 01: 8-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        /* mod = 10: 32-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
320
#if defined(CONFIG_SOFTMMU)
/* XXX: incomplete. index must be different from ESP */
/*
 * Emit opcode + ModRM + SIB + displacement for the addressing mode
 * [rm + (index << shift) + offset].  index == -1 means no index
 * register.  Aborts if rm == -1 or the offset needs more than 32 bits.
 */
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm,
                                  int index, int shift,
                                  tcg_target_long offset)
{
    int mod;
    if (rm == -1)
        tcg_abort();
    /* choose displacement size: none / disp8 / disp32 */
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        mod = 0;
    } else if (offset == (int8_t)offset) {
        mod = 0x40;
    } else if (offset == (int32_t)offset) {
        mod = 0x80;
    } else {
        tcg_abort();
    }
    if (index == -1) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* base RSP needs an SIB byte with no index */
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
            tcg_out8(s, 0x04 | (rm & 7));
        } else {
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
        }
    } else {
        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
    }
    if (mod == 0x40) {
        tcg_out8(s, offset);
    } else if (mod == 0x80) {
        tcg_out32(s, offset);
    }
}
#endif
359
/* Emit a 64-bit register-to-register move: movq arg, ret. */
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
364
/*
 * Load the constant 'arg' into register 'ret', using the shortest
 * suitable encoding: xor for zero, movl imm32 (zero-extends) for
 * unsigned 32-bit values, movq imm32 (sign-extends) for signed 32-bit
 * values, and the full 10-byte movabs otherwise.
 */
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        /* movl imm32 - the 32-bit write zero-extends to 64 bits */
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        /* movq imm32 - sign-extended 32-bit immediate */
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        /* movabs imm64 */
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
382
383static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
384 int arg1, tcg_target_long arg2)
385{
386 if (type == TCG_TYPE_I32)
387 tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2); /* movl */
388 else
389 tcg_out_modrm_offset(s, 0x8b | P_REXW, ret, arg1, arg2); /* movq */
390}
391
392static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
393 int arg1, tcg_target_long arg2)
394{
395 if (type == TCG_TYPE_I32)
396 tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2); /* movl */
397 else
398 tcg_out_modrm_offset(s, 0x89 | P_REXW, arg, arg1, arg2); /* movq */
399}
400
/*
 * Emit a 32-bit ALU operation 'c' (ARITH_*) with immediate 'val' on
 * register r0.  Uses the short imm8 form when possible, and replaces
 * "and 0xff"/"and 0xffff" with movzbl/movzwl.
 */
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if (val == (int8_t)val) {
        /* group-1 opcode with sign-extended imm8 */
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        /* group-1 opcode with imm32 */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
417
/*
 * Emit a 64-bit ALU operation 'c' (ARITH_*) with immediate 'val' on
 * register r0.  Only immediates expressible as sign-extended imm8 or
 * imm32 can be encoded; the AND special cases use zero-extending moves
 * instead.  Aborts on an unencodable immediate.
 */
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffffffu) {
        /* 32-bit mov zero extends */
        tcg_out_modrm(s, 0x8b, r0, r0);
    } else if (val == (int32_t)val) {
        /* 64-bit op with sign-extended imm32 */
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        /* 32-bit AND: upper half is cleared anyway */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}
442
443static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
444{
445 if (val != 0)
446 tgen_arithi64(s, ARITH_ADD, reg, val);
447}
448
/*
 * Emit a jump to TCG label 'label_index'.  opc is an x86 condition
 * code (JCC_*) or -1 (JCC_JMP) for an unconditional jump.  If the
 * label is already resolved, the short (rel8) form is used when it
 * fits; otherwise a rel32 form with a relocation is emitted.
 */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2; /* displacement after a 2-byte short jump */
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);          /* jmp rel8 */
            else
                tcg_out8(s, 0x70 + opc);    /* jcc rel8 */
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);          /* jmp rel32: 5 bytes */
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);          /* jcc rel32: 6 bytes */
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else {
        /* forward reference: emit rel32 form and record a relocation */
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
484
/*
 * Emit compare + conditional branch: compare arg1 with arg2 (an
 * immediate when const_arg2 is set) and jump to label_index when
 * 'cond' holds.  rexw is 0 for a 32-bit compare or P_REXW for 64-bit.
 */
static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r - shorter than cmp with 0 */
            tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
        } else {
            if (rexw)
                tgen_arithi64(s, ARITH_CMP, arg1, arg2);
            else
                tgen_arithi32(s, ARITH_CMP, arg1, arg2);
        }
    } else {
        /* cmp arg2, arg1 (register form) */
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
    }
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
504
505#ifdef VBOX
506DECLINLINE(void) tcg_out_pushq(TCGContext *s, tcg_target_long val)
507{
508 tcg_out8(s, 0x68); /* push imm32, subs 8 from rsp */
509 tcg_out32(s, val); /* imm32 */
510 if ((val >> 32) != 0)
511 {
512 tcg_out8(s, 0xc7); /* mov imm32, 4(%rsp) */
513 tcg_out8(s, 0x44);
514 tcg_out8(s, 0x24);
515 tcg_out8(s, 0x04);
516 tcg_out32(s, ((uint64_t)val) >> 32); /* imm32 */
517 }
518}
519
/*
 * Emit a call to an arbitrary 64-bit address: a direct rel32 call
 * when the target is within +-2GB, otherwise an indirect call
 * through %rax (clobbering it).
 */
DECLINLINE(void) tcg_out_long_call(TCGContext *s, tcg_target_long dst)
{
    intptr_t disp = dst - (tcg_target_long)s->code_ptr - 5;
    /* can do normal call */
    if (disp < 2LL * _1G && disp > -2LL * _1G)
    {
        tcg_out8(s, 0xe8);  /* call disp32 */
        tcg_out32(s, disp); /* disp32 */
    }
    else
    {
#if 0
        /* Somewhat tricky, but allows long jump not touching registers */
        int off = 5 /* push imm32 */ + 5 /* push imm32 */ + 1 /* ret */;
        if ((((uint64_t)s->code_ptr) + 32) >> 32)
            off += 8;
        if (dst >> 32)
            off += 8;
        /* return address */
        tcg_out_pushq(s, (tcg_target_long)s->code_ptr+off);
        /* destination */
        tcg_out_pushq(s, dst);
        tcg_out8(s, 0xc3); /* ret, used as call */
#else
        /* NOTE(review): clobbers %rax */
        tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_RAX, dst);
        tcg_out8(s, 0xff); /* call *%rax */
        tcg_out8(s, 0xd0);
#endif
    }
}
550
/*
 * Emit a jump to an arbitrary 64-bit address, shortest form first:
 * rel8, then rel32 within +-2GB, else indirect through %rax
 * (clobbering it).  The rel8 range test is slightly conservative.
 */
DECLINLINE(void) tcg_out_long_jmp(TCGContext *s, tcg_target_long dst)
{
    intptr_t disp;

    disp = dst - (tcg_target_long)s->code_ptr - 2;
    /* can do short relative jump */
    if (disp < 0x7f && disp > -0x7f)
    {
        tcg_out8(s, 0xeb); /* short jmp */
        tcg_out8(s, (int8_t)disp);
        return;
    }

    disp = dst - (tcg_target_long)s->code_ptr - 5;
    if (disp < 2LL * _1G && disp > -2LL * _1G)
    {
        tcg_out8(s, 0xe9); /* jmp rel32 */
        tcg_out32(s, (int32_t)disp);
        return;
    }
#if 0
    tcg_out_pushq(s, dst);
    tcg_out8(s, 0xc3); /* ret */
#else
    /* NOTE(review): clobbers %rax */
    tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_RAX, dst);
    tcg_out8(s, 0xff); /* jmp *%rax */
    tcg_out8(s, 0xe0);
#endif
}
580#endif
581
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Slow-path load helpers, indexed by access size log2 (0=8-bit .. 3=64-bit). */
static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

/* Slow-path store helpers, indexed the same way. */
static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
600
#if defined(VBOX) && defined(REM_PHYS_ADDR_IN_TLB)
/* Physical-memory read helpers: unsigned 8/16/32/64 at indices 0-3,
   signed variants at indices 4-7 (matching the qemu_ld opc encoding
   where bit 2 selects sign extension). */
static void *vbox_ld_helpers[] = {
    remR3PhysReadU8,
    remR3PhysReadU16,
    remR3PhysReadU32,
    remR3PhysReadU64,
    remR3PhysReadS8,
    remR3PhysReadS16,
    remR3PhysReadS32,
    remR3PhysReadS64,
};

/* Physical-memory write helpers, indexed by access size log2. */
static void *vbox_st_helpers[] = {
    remR3PhysWriteU8,
    remR3PhysWriteU16,
    remR3PhysWriteU32,
    remR3PhysWriteU64
};

/* Call vbox_ld_helpers[index](addr_reg), leaving the result in data_reg.
   Clobbers %rdi and %rax (and whatever tcg_out_long_call clobbers). */
static void tcg_out_vbox_phys_read(TCGContext *s, int index, int addr_reg, int data_reg) {
    if (addr_reg != TCG_REG_RDI)
        /* mov addr_reg, %rdi */
        tcg_out_modrm(s, 0x8b | P_REXW, TCG_REG_RDI, addr_reg);

    tcg_out_long_call(s, (tcg_target_long)vbox_ld_helpers[index]);
    /* mov %rax, data_reg*/
    tcg_out_modrm(s, 0x8b | P_REXW, data_reg, TCG_REG_RAX);
}

/* Call vbox_st_helpers[index](addr_reg, val_reg).
   Clobbers %rdi and %rsi (and whatever tcg_out_long_call clobbers). */
static void tcg_out_vbox_phys_write(TCGContext *s, int index, int addr_reg, int val_reg) {
    if (addr_reg != TCG_REG_RDI)
        /* mov addr_reg, %rdi */
        tcg_out_modrm(s, 0x8b | P_REXW, TCG_REG_RDI, addr_reg);
    if (val_reg != TCG_REG_RSI)
        /* mov val_reg, %rsi */
        tcg_out_modrm(s, 0x8b | P_REXW, TCG_REG_RSI, val_reg);
    tcg_out_long_call(s, (tcg_target_long)vbox_st_helpers[index]);
}

#endif
641
/*
 * Emit a guest-memory load.  args = { data_reg, addr_reg, mem_index };
 * opc bits 0-1 encode the access size log2 (0=8-bit .. 3=64-bit) and
 * bit 2 selects sign extension.
 *
 * With CONFIG_SOFTMMU an inline TLB lookup is generated: on a hit the
 * host address is addr + tlb addend; on a miss the slow-path helper
 * qemu_ld_helpers[size] is called.  Uses %rdi/%rsi as scratch (hence
 * the 'L' constraint excluding them).
 */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;   /* access size log2, sign bit stripped */

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* r1 = addr (will become the TLB entry offset) */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* r0 = addr (will become the page-aligned compare value) */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    /* keep the page number plus the low alignment bits of the access */
    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* r1 = &env->tlb_table[mem_index][index].addr_read */
    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* reload the untouched address into r0 for both paths */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 (TLB hit) - rel8 patched once the label is reached */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: call the slow-path helper(addr in %rdi, mem_index) */
    /* XXX: move that code at the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
#ifndef VBOX
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);
#else
    tcg_out_long_call(s, (tcg_target_long)qemu_ld_helpers[s_bits]);
#endif

    /* move/extend the helper result from %rax into data_reg */
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
    case 1:
    case 2:
    default:
        /* movl - zero-extends, which covers the unsigned 8/16/32 cases */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 (skip the fast path) */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: TLB hit - patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* r0 = guest addr + TLB addend = host address */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif

#if !defined(VBOX) || !defined(REM_PHYS_ADDR_IN_TLB)

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif

    /* fast path: direct load from the host address in r0, with byte
       swapping when the guest endianness differs from the host's */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, 0);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, 0);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* load zero-extended, swap, then sign-extend in place */
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, 0);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, 0);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }
#else /* VBOX */
    /* full opc (including the sign bit) indexes vbox_ld_helpers[8] */
    tcg_out_vbox_phys_read(s, opc, r0, data_reg);
#endif /* VBOX */

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the jmp displacement past the fast path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
828
/*
 * Emit a guest-memory store.  args = { data_reg, addr_reg, mem_index };
 * opc is the access size log2 (0=8-bit .. 3=64-bit).
 *
 * Mirrors tcg_out_qemu_ld: with CONFIG_SOFTMMU an inline TLB lookup
 * (against addr_write) is generated, calling qemu_st_helpers[size] on
 * a miss.  Uses %rdi/%rsi as scratch.
 */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* r1 = addr (will become the TLB entry offset) */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* r0 = addr (will become the page-aligned compare value) */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* r1 = &env->tlb_table[mem_index][index].addr_write */
    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* reload the untouched address into r0 for both paths */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 (TLB hit) */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: set up helper args (%rdi=addr already, %rsi=value,
       %rdx=mem_index) and call the slow path */
    /* XXX: move that code at the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
#ifndef VBOX
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);
#else
    tcg_out_long_call(s, (tcg_target_long)qemu_st_helpers[s_bits]);
#endif

    /* jmp label2 (skip the fast path) */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: TLB hit - patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* r0 = guest addr + TLB addend = host address */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif

#if !defined(VBOX) || !defined(REM_PHYS_ADDR_IN_TLB)
#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* fast path: direct store to the host address in r0, byte-swapping
       a copy in r1 first when guest endianness differs */
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, data_reg, r0, 0);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, 0);
        break;
    default:
        tcg_abort();
    }
#else /* VBOX */
    tcg_out_vbox_phys_write(s, opc, r0, data_reg);
#endif /* VBOX */

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the jmp displacement past the fast path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
980
981static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
982 const int *const_args)
983{
984 int c;
985
986 switch(opc) {
987 case INDEX_op_exit_tb:
988 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
989#ifndef VBOX
990 tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
991 tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
992#else
993 tcg_out_long_jmp(s, (tcg_target_long)tb_ret_addr);
994#endif
995 break;
996 case INDEX_op_goto_tb:
997 if (s->tb_jmp_offset) {
998 /* direct jump method */
999 tcg_out8(s, 0xe9); /* jmp im */
1000 s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
1001 tcg_out32(s, 0);
1002 } else {
1003 /* indirect jump method */
1004 /* jmp Ev */
1005#ifndef VBOX
1006 tcg_out_modrm_offset(s, 0xff, 4, -1,
1007 (tcg_target_long)(s->tb_next +
1008 args[0]));
1009#else
1010 /* @todo: can we clobber RAX here? */
1011 tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_RAX,
1012 (tcg_target_long)&(s->tb_next[args[0]]));
1013 tcg_out8(s, 0xff); tcg_out8(s, 0x20 | TCG_REG_RAX); /* jmp *(%rax) */
1014#endif
1015 }
1016 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1017 break;
1018 case INDEX_op_call:
1019 if (const_args[0]) {
1020#ifndef VBOX
1021 tcg_out8(s, 0xe8);
1022 tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
1023#else
1024 tcg_out_long_call(s, args[0]);
1025#endif
1026 } else {
1027 tcg_out_modrm(s, 0xff, 2, args[0]);
1028 }
1029 break;
1030 case INDEX_op_jmp:
1031 if (const_args[0]) {
1032 tcg_out8(s, 0xe9);
1033 tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
1034 } else {
1035 tcg_out_modrm(s, 0xff, 4, args[0]);
1036 }
1037 break;
1038 case INDEX_op_br:
1039 tcg_out_jxx(s, JCC_JMP, args[0]);
1040 break;
1041 case INDEX_op_movi_i32:
1042 tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
1043 break;
1044 case INDEX_op_movi_i64:
1045 tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
1046 break;
1047 case INDEX_op_ld8u_i32:
1048 case INDEX_op_ld8u_i64:
1049 /* movzbl */
1050 tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
1051 break;
1052 case INDEX_op_ld8s_i32:
1053 /* movsbl */
1054 tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
1055 break;
1056 case INDEX_op_ld8s_i64:
1057 /* movsbq */
1058 tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
1059 break;
1060 case INDEX_op_ld16u_i32:
1061 case INDEX_op_ld16u_i64:
1062 /* movzwl */
1063 tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
1064 break;
1065 case INDEX_op_ld16s_i32:
1066 /* movswl */
1067 tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
1068 break;
1069 case INDEX_op_ld16s_i64:
1070 /* movswq */
1071 tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
1072 break;
1073 case INDEX_op_ld_i32:
1074 case INDEX_op_ld32u_i64:
1075 /* movl */
1076 tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
1077 break;
1078 case INDEX_op_ld32s_i64:
1079 /* movslq */
1080 tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
1081 break;
1082 case INDEX_op_ld_i64:
1083 /* movq */
1084 tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
1085 break;
1086
1087 case INDEX_op_st8_i32:
1088 case INDEX_op_st8_i64:
1089 /* movb */
1090 tcg_out_modrm_offset(s, 0x88 | P_REXB, args[0], args[1], args[2]);
1091 break;
1092 case INDEX_op_st16_i32:
1093 case INDEX_op_st16_i64:
1094 /* movw */
1095 tcg_out8(s, 0x66);
1096 tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
1097 break;
1098 case INDEX_op_st_i32:
1099 case INDEX_op_st32_i64:
1100 /* movl */
1101 tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
1102 break;
1103 case INDEX_op_st_i64:
1104 /* movq */
1105 tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
1106 break;
1107
1108 case INDEX_op_sub_i32:
1109 c = ARITH_SUB;
1110 goto gen_arith32;
1111 case INDEX_op_and_i32:
1112 c = ARITH_AND;
1113 goto gen_arith32;
1114 case INDEX_op_or_i32:
1115 c = ARITH_OR;
1116 goto gen_arith32;
1117 case INDEX_op_xor_i32:
1118 c = ARITH_XOR;
1119 goto gen_arith32;
1120 case INDEX_op_add_i32:
1121 c = ARITH_ADD;
1122 gen_arith32:
1123 if (const_args[2]) {
1124 tgen_arithi32(s, c, args[0], args[2]);
1125 } else {
1126 tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
1127 }
1128 break;
1129
1130 case INDEX_op_sub_i64:
1131 c = ARITH_SUB;
1132 goto gen_arith64;
1133 case INDEX_op_and_i64:
1134 c = ARITH_AND;
1135 goto gen_arith64;
1136 case INDEX_op_or_i64:
1137 c = ARITH_OR;
1138 goto gen_arith64;
1139 case INDEX_op_xor_i64:
1140 c = ARITH_XOR;
1141 goto gen_arith64;
1142 case INDEX_op_add_i64:
1143 c = ARITH_ADD;
1144 gen_arith64:
1145 if (const_args[2]) {
1146 tgen_arithi64(s, c, args[0], args[2]);
1147 } else {
1148 tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
1149 }
1150 break;
1151
1152 case INDEX_op_mul_i32:
1153 if (const_args[2]) {
1154 int32_t val;
1155 val = args[2];
1156 if (val == (int8_t)val) {
1157 tcg_out_modrm(s, 0x6b, args[0], args[0]);
1158 tcg_out8(s, val);
1159 } else {
1160 tcg_out_modrm(s, 0x69, args[0], args[0]);
1161 tcg_out32(s, val);
1162 }
1163 } else {
1164 tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
1165 }
1166 break;
1167 case INDEX_op_mul_i64:
1168 if (const_args[2]) {
1169 int32_t val;
1170 val = args[2];
1171 if (val == (int8_t)val) {
1172 tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
1173 tcg_out8(s, val);
1174 } else {
1175 tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
1176 tcg_out32(s, val);
1177 }
1178 } else {
1179 tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
1180 }
1181 break;
1182 case INDEX_op_div2_i32:
1183 tcg_out_modrm(s, 0xf7, 7, args[4]);
1184 break;
1185 case INDEX_op_divu2_i32:
1186 tcg_out_modrm(s, 0xf7, 6, args[4]);
1187 break;
1188 case INDEX_op_div2_i64:
1189 tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
1190 break;
1191 case INDEX_op_divu2_i64:
1192 tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
1193 break;
1194
1195 case INDEX_op_shl_i32:
1196 c = SHIFT_SHL;
1197 gen_shift32:
1198 if (const_args[2]) {
1199 if (args[2] == 1) {
1200 tcg_out_modrm(s, 0xd1, c, args[0]);
1201 } else {
1202 tcg_out_modrm(s, 0xc1, c, args[0]);
1203 tcg_out8(s, args[2]);
1204 }
1205 } else {
1206 tcg_out_modrm(s, 0xd3, c, args[0]);
1207 }
1208 break;
1209 case INDEX_op_shr_i32:
1210 c = SHIFT_SHR;
1211 goto gen_shift32;
1212 case INDEX_op_sar_i32:
1213 c = SHIFT_SAR;
1214 goto gen_shift32;
1215
1216 case INDEX_op_shl_i64:
1217 c = SHIFT_SHL;
1218 gen_shift64:
1219 if (const_args[2]) {
1220 if (args[2] == 1) {
1221 tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
1222 } else {
1223 tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
1224 tcg_out8(s, args[2]);
1225 }
1226 } else {
1227 tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
1228 }
1229 break;
1230 case INDEX_op_shr_i64:
1231 c = SHIFT_SHR;
1232 goto gen_shift64;
1233 case INDEX_op_sar_i64:
1234 c = SHIFT_SAR;
1235 goto gen_shift64;
1236
1237 case INDEX_op_brcond_i32:
1238 tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
1239 args[3], 0);
1240 break;
1241 case INDEX_op_brcond_i64:
1242 tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
1243 args[3], P_REXW);
1244 break;
1245
1246 case INDEX_op_bswap_i32:
1247 tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
1248 break;
1249 case INDEX_op_bswap_i64:
1250 tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
1251 break;
1252
1253 case INDEX_op_neg_i32:
1254 tcg_out_modrm(s, 0xf7, 3, args[0]);
1255 break;
1256 case INDEX_op_neg_i64:
1257 tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]);
1258 break;
1259
1260 case INDEX_op_ext8s_i32:
1261 tcg_out_modrm(s, 0xbe | P_EXT | P_REXB, args[0], args[1]);
1262 break;
1263 case INDEX_op_ext16s_i32:
1264 tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
1265 break;
1266 case INDEX_op_ext8s_i64:
1267 tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, args[0], args[1]);
1268 break;
1269 case INDEX_op_ext16s_i64:
1270 tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, args[0], args[1]);
1271 break;
1272 case INDEX_op_ext32s_i64:
1273 tcg_out_modrm(s, 0x63 | P_REXW, args[0], args[1]);
1274 break;
1275
1276 case INDEX_op_qemu_ld8u:
1277 tcg_out_qemu_ld(s, args, 0);
1278 break;
1279 case INDEX_op_qemu_ld8s:
1280 tcg_out_qemu_ld(s, args, 0 | 4);
1281 break;
1282 case INDEX_op_qemu_ld16u:
1283 tcg_out_qemu_ld(s, args, 1);
1284 break;
1285 case INDEX_op_qemu_ld16s:
1286 tcg_out_qemu_ld(s, args, 1 | 4);
1287 break;
1288 case INDEX_op_qemu_ld32u:
1289 tcg_out_qemu_ld(s, args, 2);
1290 break;
1291 case INDEX_op_qemu_ld32s:
1292 tcg_out_qemu_ld(s, args, 2 | 4);
1293 break;
1294 case INDEX_op_qemu_ld64:
1295 tcg_out_qemu_ld(s, args, 3);
1296 break;
1297
1298 case INDEX_op_qemu_st8:
1299 tcg_out_qemu_st(s, args, 0);
1300 break;
1301 case INDEX_op_qemu_st16:
1302 tcg_out_qemu_st(s, args, 1);
1303 break;
1304 case INDEX_op_qemu_st32:
1305 tcg_out_qemu_st(s, args, 2);
1306 break;
1307 case INDEX_op_qemu_st64:
1308 tcg_out_qemu_st(s, args, 3);
1309 break;
1310
1311 default:
1312 tcg_abort();
1313 }
1314}
1315
1316static int tcg_target_callee_save_regs[] = {
1317 TCG_REG_RBP,
1318 TCG_REG_RBX,
1319 TCG_REG_R12,
1320 TCG_REG_R13,
1321 /* TCG_REG_R14, */ /* currently used for the global env, so no
1322 need to save */
1323 TCG_REG_R15,
1324};
1325
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);

    }
    /* reserve some stack space */
    /* push_size: the caller's return address (8 bytes, pushed by CALL)
       plus one 8-byte slot per register pushed above */
    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    /* round the total frame up to the target stack alignment */
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    /* only the part not already covered by the pushes needs an explicit
       RSP adjustment */
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);

    /* tail-jump into the translated code whose address the caller
       passed in the first C argument register */
    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */

    /* TB epilogue */
    /* remember where the epilogue starts: translated blocks return here */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
    /* pop the callee-saved registers in reverse order of the pushes */
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}
1355
/* Operand-constraint table: one entry per TCG opcode, giving a
   constraint string for each operand (outputs first, then inputs).
   A digit ("0", "1") ties an input to the same register as that
   output operand.  NOTE(review): the letter meanings ("r", "a", "d",
   "c", "e", "Z", "L", "i") are defined by this backend's
   target_parse_constraint, which is outside this excerpt — presumably
   "r" = any GP register, "a"/"d"/"c" = RAX/RDX/RCX, "i" = immediate,
   "e" = sign-extended 32-bit immediate, "Z" = zero-extended 32-bit
   immediate, "L" = registers usable by the qemu_ld/st slow path;
   confirm against that function. */
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    /* two-address x86 arithmetic: destination must equal first input */
    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    /* div uses the fixed RAX:RDX register pair */
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    /* variable shift counts must live in CL */
    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    { INDEX_op_bswap_i32, { "r", "0" } },
    { INDEX_op_bswap_i64, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "r"} },
    { INDEX_op_ext16s_i32, { "r", "r"} },
    { INDEX_op_ext8s_i64, { "r", "r"} },
    { INDEX_op_ext16s_i64, { "r", "r"} },
    { INDEX_op_ext32s_i64, { "r", "r"} },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },

    /* table terminator */
    { -1 },
};
1445
1446void tcg_target_init(TCGContext *s)
1447{
1448 /* fail safe */
1449 if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
1450 tcg_abort();
1451
1452 tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
1453 tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
1454 tcg_regset_set32(tcg_target_call_clobber_regs, 0,
1455 (1 << TCG_REG_RDI) |
1456 (1 << TCG_REG_RSI) |
1457 (1 << TCG_REG_RDX) |
1458 (1 << TCG_REG_RCX) |
1459 (1 << TCG_REG_R8) |
1460 (1 << TCG_REG_R9) |
1461 (1 << TCG_REG_RAX) |
1462 (1 << TCG_REG_R10) |
1463 (1 << TCG_REG_R11));
1464
1465 tcg_regset_clear(s->reserved_regs);
1466 tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);
1467
1468 tcg_add_target_add_op_defs(x86_64_op_defs);
1469}
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette