VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@19484

Last change on this file since 19484 was 18476, checked in by vboxsync, 16 years ago

REM/translate.c: warning about unused label and an unused function that looks like some remnants from an experiment with external events...

  • Property svn:eol-style set to native
File size: 276.3 KB
/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
 * a choice of LGPL license versions is made available with the language indicating
 * that LGPLv2 or any later version may be used, or where a choice of which version
 * of the LGPL is applied is otherwise unspecified.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifndef VBOX
#include <inttypes.h>
#include <signal.h>
#include <assert.h>
#endif /* !VBOX */

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

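/* Instruction prefix flags, accumulated into DisasContext.prefix while
   decoding an instruction. */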
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

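/* Condition codes are evaluated lazily: cpu_cc_op records which operation
   last set the flags while cpu_cc_src/cpu_cc_dst hold its operands, so
   EFLAGS is only materialized when a consumer actually needs it. */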
#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef VBOX

/* Special/override code readers to hide patched code. */

uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

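/* The 16/32-bit readers below are assembled from byte loads (little endian),
   so every code byte is filtered through the patch-hiding check above. */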
uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */


typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if any of DS/ES/SS has a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int pvi;    /* CR4.PVI */
    int record_call;    /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

#ifdef VBOX
static void gen_check_external_event(void);
#endif

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
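/* The REG_*_OFFSET values locate a sub-register slice (e.g. AL/AH/AX/EAX)
   inside the target_ulong slot of CPUState.regs[], for either host
   endianness. */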

#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
#ifndef VBOX
            goto std_case;
#else
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
#endif
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
#ifndef VBOX
    std_case:
#endif
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef VBOX
DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
{
    /* It seems segments don't get out of sync - if they in fact do, enable the code below. */
#ifdef FORCE_SEGMENT_SYNC
#if 1
    TCGv t0;

    /* Considering the poor quality of the TCG optimizer, it is better to call the helper directly. */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(t0, reg);
    tcg_gen_helper_0_1(helper_sync_seg, t0);
    tcg_temp_free(t0);
#else
    /* Our segments could be outdated; check the newselector field to see if an update is really needed. */
    int skip_label;
    TCGv t0, a0;

    /* For segments other than GS this check is a waste of time, and TCG is also unable to
       cope with this code for data/stack segments, as it expects cpu_T[0] to be live. */
    if (reg != R_GS)
        return;

    if (keepA0)
    {
        /* we need to store old cpu_A0 */
        a0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_mov_tl(a0, cpu_A0);
    }

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
    tcg_gen_andi_tl(t0, t0, VM_MASK);
    tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
    tcg_gen_movi_tl(t0, reg);

    tcg_gen_helper_0_1(helper_sync_seg, t0);

    tcg_temp_free(t0);

    gen_set_label(skip_label);
    if (keepA0)
    {
        tcg_gen_mov_tl(cpu_A0, a0);
        tcg_temp_free(a0);
    }
#endif /* 0 */
#endif /* FORCE_SEGMENT_SYNC */
}
#endif

#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

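/* In the load/store helpers below, idx is ot + s->mem_index: the operand size
   lives in bits 0-1 and the (pre-shifted) softmmu memory index in the upper
   bits, hence the (idx >> 2) - 1 decode. */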
#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}

/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

#ifdef VBOX
static void gen_check_external_event(void)
{
#if 1
    /** @todo: once TCG codegen improves, we may want to use the
       version from the #else branch below. */
    tcg_gen_helper_0_0(helper_check_external_event);
#else
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    /* t0 = cpu_tmp0; */

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict branch as taken */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_temp_free(t0);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
#endif
}

#if 0 /* unused code? */
static void gen_check_external_event2(void)
{
    tcg_gen_helper_0_0(helper_check_external_event);
}
#endif

#endif

#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
DECLINLINE(void) gen_update_eip(target_ulong pc)
{
    gen_jmp_im(pc);
#ifdef VBOX_DUMP_STATE
    tcg_gen_helper_0_0(helper_dump_state);
#endif
}

#endif

#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
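/* Note: the destination of string operations is always ES:EDI; segment
   overrides apply only to the ESI source side handled above. */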

#ifndef VBOX
static inline void gen_op_movl_T0_Dshift(int ot)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
#endif /* VBOX */
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
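/* The first helper call above validates the access against the TSS I/O
   permission bitmap (needed when CPL > IOPL or in vm86 mode); the second
   reports the access to the SVM I/O intercept logic. */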

#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

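/* The flag helpers below are dispatched through cc_table[cc_op] at run time:
   the entry address is computed from cpu_cc_op (entries are 8 or 16 bytes,
   hence the shift by 3 or 4) and invoked as an indirect TCG call. */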
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

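/* EFLAGS bit positions used below: CF = bit 0, PF = bit 2, ZF = bit 6,
   SF = bit 7, OF = bit 11. */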
#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

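/* When instruction counting (icount) is enabled, the I/O string primitives
   below are bracketed with gen_io_start()/gen_io_end() so the instruction
   counter stays exact across the port access. */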
#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind: we generate jumps to current or next
   instruction */
#ifndef VBOX
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ(op)                                                          \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */

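/* GEN_REPZ2 additionally tests ZF after each iteration, as required for
   REPE/REPNE scas and cmps; the nz parameter selects which polarity of ZF
   terminates the loop. */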
#ifndef VBOX
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ2(op)                                                         \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip,                        \
                                 target_ulong next_eip,                       \
                                 int nz)                                      \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
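    /* INC/DEC leave CF untouched, so the carry of the previous operation
       (whose cc_op was flushed above) is latched into cc_src here. */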
1888 gen_compute_eflags_c(cpu_cc_src);
1889 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1890}
1891
1892static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1893 int is_right, int is_arith)
1894{
1895 target_ulong mask;
1896 int shift_label;
1897 TCGv t0, t1;
1898
1899 if (ot == OT_QUAD)
1900 mask = 0x3f;
1901 else
1902 mask = 0x1f;
1903
1904 /* load */
1905 if (op1 == OR_TMP0)
1906 gen_op_ld_T0_A0(ot + s->mem_index);
1907 else
1908 gen_op_mov_TN_reg(ot, 0, op1);
1909
1910 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1911
1912 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1913
1914 if (is_right) {
1915 if (is_arith) {
1916 gen_exts(ot, cpu_T[0]);
1917 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1918 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1919 } else {
1920 gen_extu(ot, cpu_T[0]);
1921 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1922 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1923 }
1924 } else {
1925 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1926 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1927 }
1928
1929 /* store */
1930 if (op1 == OR_TMP0)
1931 gen_op_st_T0_A0(ot + s->mem_index);
1932 else
1933 gen_op_mov_reg_T0(ot, op1);
1934
1935 /* update eflags if non zero shift */
1936 if (s->cc_op != CC_OP_DYNAMIC)
1937 gen_op_set_cc_op(s->cc_op);
1938
1939 /* XXX: inefficient */
1940 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1941 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1942
1943 tcg_gen_mov_tl(t0, cpu_T[0]);
1944 tcg_gen_mov_tl(t1, cpu_T3);
1945
1946 shift_label = gen_new_label();
1947 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1948
1949 tcg_gen_mov_tl(cpu_cc_src, t1);
1950 tcg_gen_mov_tl(cpu_cc_dst, t0);
1951 if (is_right)
1952 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1953 else
1954 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1955
1956 gen_set_label(shift_label);
1957 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1958
1959 tcg_temp_free(t0);
1960 tcg_temp_free(t1);
1961}
1962
1963static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1964 int is_right, int is_arith)
1965{
1966 int mask;
1967
1968 if (ot == OT_QUAD)
1969 mask = 0x3f;
1970 else
1971 mask = 0x1f;
1972
1973 /* load */
1974 if (op1 == OR_TMP0)
1975 gen_op_ld_T0_A0(ot + s->mem_index);
1976 else
1977 gen_op_mov_TN_reg(ot, 0, op1);
1978
1979 op2 &= mask;
1980 if (op2 != 0) {
1981 if (is_right) {
1982 if (is_arith) {
1983 gen_exts(ot, cpu_T[0]);
1984 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1985 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1986 } else {
1987 gen_extu(ot, cpu_T[0]);
1988 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1989 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1990 }
1991 } else {
1992 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1993 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1994 }
1995 }
1996
1997 /* store */
1998 if (op1 == OR_TMP0)
1999 gen_op_st_T0_A0(ot + s->mem_index);
2000 else
2001 gen_op_mov_reg_T0(ot, op1);
2002
2003 /* update eflags if non zero shift */
2004 if (op2 != 0) {
2005 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
2006 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
2007 if (is_right)
2008 s->cc_op = CC_OP_SARB + ot;
2009 else
2010 s->cc_op = CC_OP_SHLB + ot;
2011 }
2012}
2013
2014#ifndef VBOX
2015static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2016#else /* VBOX */
2017DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2018#endif /* VBOX */
2019{
2020 if (arg2 >= 0)
2021 tcg_gen_shli_tl(ret, arg1, arg2);
2022 else
2023 tcg_gen_shri_tl(ret, arg1, -arg2);
2024}
2025
2026/* XXX: add faster immediate case */
2027static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
2028 int is_right)
2029{
2030 target_ulong mask;
2031 int label1, label2, data_bits;
2032 TCGv t0, t1, t2, a0;
2033
2034 /* XXX: inefficient, but we must use local temps */
2035 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2036 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2037 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2038 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2039
2040 if (ot == OT_QUAD)
2041 mask = 0x3f;
2042 else
2043 mask = 0x1f;
2044
2045 /* load */
2046 if (op1 == OR_TMP0) {
2047 tcg_gen_mov_tl(a0, cpu_A0);
2048 gen_op_ld_v(ot + s->mem_index, t0, a0);
2049 } else {
2050 gen_op_mov_v_reg(ot, t0, op1);
2051 }
2052
2053 tcg_gen_mov_tl(t1, cpu_T[1]);
2054
2055 tcg_gen_andi_tl(t1, t1, mask);
2056
2057 /* Must test zero case to avoid using undefined behaviour in TCG
2058 shifts. */
2059 label1 = gen_new_label();
2060 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
2061
2062 if (ot <= OT_WORD)
2063 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
2064 else
2065 tcg_gen_mov_tl(cpu_tmp0, t1);
2066
2067 gen_extu(ot, t0);
2068 tcg_gen_mov_tl(t2, t0);
2069
2070 data_bits = 8 << ot;
2071 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
2072 fix TCG definition) */
2073 if (is_right) {
2074 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
2075 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2076 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
2077 } else {
2078 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
2079 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2080 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2081 }
2082 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2083
2084 gen_set_label(label1);
2085 /* store */
2086 if (op1 == OR_TMP0) {
2087 gen_op_st_v(ot + s->mem_index, t0, a0);
2088 } else {
2089 gen_op_mov_reg_v(ot, op1, t0);
2090 }
2091
2092 /* update eflags */
2093 if (s->cc_op != CC_OP_DYNAMIC)
2094 gen_op_set_cc_op(s->cc_op);
2095
2096 label2 = gen_new_label();
2097 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2098
2099 gen_compute_eflags(cpu_cc_src);
2100 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2101 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2102 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2103 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2104 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2105 if (is_right) {
2106 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2107 }
2108 tcg_gen_andi_tl(t0, t0, CC_C);
2109 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2110
2111 tcg_gen_discard_tl(cpu_cc_dst);
2112 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2113
2114 gen_set_label(label2);
2115 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2116
2117 tcg_temp_free(t0);
2118 tcg_temp_free(t1);
2119 tcg_temp_free(t2);
2120 tcg_temp_free(a0);
2121}
2122
2123static void *helper_rotc[8] = {
2124 helper_rclb,
2125 helper_rclw,
2126 helper_rcll,
2127 X86_64_ONLY(helper_rclq),
2128 helper_rcrb,
2129 helper_rcrw,
2130 helper_rcrl,
2131 X86_64_ONLY(helper_rcrq),
2132};
2133
2134/* XXX: add faster immediate = 1 case */
2135static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2136 int is_right)
2137{
2138 int label1;
2139
2140 if (s->cc_op != CC_OP_DYNAMIC)
2141 gen_op_set_cc_op(s->cc_op);
2142
2143 /* load */
2144 if (op1 == OR_TMP0)
2145 gen_op_ld_T0_A0(ot + s->mem_index);
2146 else
2147 gen_op_mov_TN_reg(ot, 0, op1);
2148
2149 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2150 cpu_T[0], cpu_T[0], cpu_T[1]);
2151 /* store */
2152 if (op1 == OR_TMP0)
2153 gen_op_st_T0_A0(ot + s->mem_index);
2154 else
2155 gen_op_mov_reg_T0(ot, op1);
2156
2157 /* update eflags */
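/* The rcl/rcr helpers appear to leave the updated flags in cc_tmp and
   to store -1 there when the masked count was zero (flags unchanged),
   so cc_tmp is only committed to cc_src below in the former case. */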
2158 label1 = gen_new_label();
2159 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2160
2161 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2162 tcg_gen_discard_tl(cpu_cc_dst);
2163 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2164
2165 gen_set_label(label1);
2166 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2167}
2168
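/* SHLD/SHRD: shift t0 by t2 bits, shifting in bits from t1.  For the
   16-bit case, the Intel behaviour for counts > 16 is obtained by
   operating on a 32-bit concatenation of the two operands. */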
2169/* XXX: add faster immediate case */
2170static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2171 int is_right)
2172{
2173 int label1, label2, data_bits;
2174 target_ulong mask;
2175 TCGv t0, t1, t2, a0;
2176
2177 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2178 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2179 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2180 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2181
2182 if (ot == OT_QUAD)
2183 mask = 0x3f;
2184 else
2185 mask = 0x1f;
2186
2187 /* load */
2188 if (op1 == OR_TMP0) {
2189 tcg_gen_mov_tl(a0, cpu_A0);
2190 gen_op_ld_v(ot + s->mem_index, t0, a0);
2191 } else {
2192 gen_op_mov_v_reg(ot, t0, op1);
2193 }
2194
2195 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2196
2197 tcg_gen_mov_tl(t1, cpu_T[1]);
2198 tcg_gen_mov_tl(t2, cpu_T3);
2199
2200 /* Must test zero case to avoid using undefined behaviour in TCG
2201 shifts. */
2202 label1 = gen_new_label();
2203 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2204
2205 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2206 if (ot == OT_WORD) {
2207 /* Note: we implement the Intel behaviour for shift count > 16 */
2208 if (is_right) {
2209 tcg_gen_andi_tl(t0, t0, 0xffff);
2210 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2211 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2212 tcg_gen_ext32u_tl(t0, t0);
2213
2214 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2215
2216 /* only needed if count > 16, but testing for that would complicate the code */
2217 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2218 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2219
2220 tcg_gen_shr_tl(t0, t0, t2);
2221
2222 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2223 } else {
2224 /* XXX: not optimal */
2225 tcg_gen_andi_tl(t0, t0, 0xffff);
2226 tcg_gen_shli_tl(t1, t1, 16);
2227 tcg_gen_or_tl(t1, t1, t0);
2228 tcg_gen_ext32u_tl(t1, t1);
2229
2230 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2231 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2232 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2233 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2234
2235 tcg_gen_shl_tl(t0, t0, t2);
2236 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2237 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2238 tcg_gen_or_tl(t0, t0, t1);
2239 }
2240 } else {
2241 data_bits = 8 << ot;
2242 if (is_right) {
2243 if (ot == OT_LONG)
2244 tcg_gen_ext32u_tl(t0, t0);
2245
2246 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2247
2248 tcg_gen_shr_tl(t0, t0, t2);
2249 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2250 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2251 tcg_gen_or_tl(t0, t0, t1);
2252
2253 } else {
2254 if (ot == OT_LONG)
2255 tcg_gen_ext32u_tl(t1, t1);
2256
2257 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2258
2259 tcg_gen_shl_tl(t0, t0, t2);
2260 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2261 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2262 tcg_gen_or_tl(t0, t0, t1);
2263 }
2264 }
2265 tcg_gen_mov_tl(t1, cpu_tmp4);
2266
2267 gen_set_label(label1);
2268 /* store */
2269 if (op1 == OR_TMP0) {
2270 gen_op_st_v(ot + s->mem_index, t0, a0);
2271 } else {
2272 gen_op_mov_reg_v(ot, op1, t0);
2273 }
2274
2275 /* update eflags */
2276 if (s->cc_op != CC_OP_DYNAMIC)
2277 gen_op_set_cc_op(s->cc_op);
2278
2279 label2 = gen_new_label();
2280 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2281
2282 tcg_gen_mov_tl(cpu_cc_src, t1);
2283 tcg_gen_mov_tl(cpu_cc_dst, t0);
2284 if (is_right) {
2285 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2286 } else {
2287 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2288 }
2289 gen_set_label(label2);
2290 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2291
2292 tcg_temp_free(t0);
2293 tcg_temp_free(t1);
2294 tcg_temp_free(t2);
2295 tcg_temp_free(a0);
2296}
2297
2298static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2299{
2300 if (s != OR_TMP1)
2301 gen_op_mov_TN_reg(ot, 1, s);
2302 switch(op) {
2303 case OP_ROL:
2304 gen_rot_rm_T1(s1, ot, d, 0);
2305 break;
2306 case OP_ROR:
2307 gen_rot_rm_T1(s1, ot, d, 1);
2308 break;
2309 case OP_SHL:
2310 case OP_SHL1:
2311 gen_shift_rm_T1(s1, ot, d, 0, 0);
2312 break;
2313 case OP_SHR:
2314 gen_shift_rm_T1(s1, ot, d, 1, 0);
2315 break;
2316 case OP_SAR:
2317 gen_shift_rm_T1(s1, ot, d, 1, 1);
2318 break;
2319 case OP_RCL:
2320 gen_rotc_rm_T1(s1, ot, d, 0);
2321 break;
2322 case OP_RCR:
2323 gen_rotc_rm_T1(s1, ot, d, 1);
2324 break;
2325 }
2326}
2327
2328static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2329{
2330 switch(op) {
2331 case OP_SHL:
2332 case OP_SHL1:
2333 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2334 break;
2335 case OP_SHR:
2336 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2337 break;
2338 case OP_SAR:
2339 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2340 break;
2341 default:
2342 /* currently not optimized */
2343 gen_op_movl_T1_im(c);
2344 gen_shift(s1, op, ot, d, OR_TMP1);
2345 break;
2346 }
2347}
2348
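/* ModRM layout: mod(7:6) reg(5:3) rm(2:0); with 32/64-bit addressing,
   rm == 4 selects a SIB byte: scale(7:6) index(5:3) base(2:0).  As a
   hypothetical worked example, 8D 44 91 10 = lea eax,[ecx+edx*4+0x10]:
   modrm 0x44 -> mod=1 rm=4(SIB), sib 0x91 -> scale=2 index=EDX base=ECX,
   followed by the 8-bit displacement 0x10. */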
2349static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2350{
2351 target_long disp;
2352 int havesib;
2353 int base;
2354 int index;
2355 int scale;
2356 int opreg;
2357 int mod, rm, code, override, must_add_seg;
2358
2359 override = s->override;
2360 must_add_seg = s->addseg;
2361 if (override >= 0)
2362 must_add_seg = 1;
2363 mod = (modrm >> 6) & 3;
2364 rm = modrm & 7;
2365
2366 if (s->aflag) {
2367
2368 havesib = 0;
2369 base = rm;
2370 index = 0;
2371 scale = 0;
2372
2373 if (base == 4) {
2374 havesib = 1;
2375 code = ldub_code(s->pc++);
2376 scale = (code >> 6) & 3;
2377 index = ((code >> 3) & 7) | REX_X(s);
2378 base = (code & 7);
2379 }
2380 base |= REX_B(s);
2381
2382 switch (mod) {
2383 case 0:
2384 if ((base & 7) == 5) {
2385 base = -1;
2386 disp = (int32_t)ldl_code(s->pc);
2387 s->pc += 4;
2388 if (CODE64(s) && !havesib) {
2389 disp += s->pc + s->rip_offset;
2390 }
2391 } else {
2392 disp = 0;
2393 }
2394 break;
2395 case 1:
2396 disp = (int8_t)ldub_code(s->pc++);
2397 break;
2398 default:
2399 case 2:
2400#ifdef VBOX
2401 disp = (int32_t)ldl_code(s->pc);
2402#else
2403 disp = ldl_code(s->pc);
2404#endif
2405 s->pc += 4;
2406 break;
2407 }
2408
2409 if (base >= 0) {
2410 /* for correct popl handling with esp */
2411 if (base == 4 && s->popl_esp_hack)
2412 disp += s->popl_esp_hack;
2413#ifdef TARGET_X86_64
2414 if (s->aflag == 2) {
2415 gen_op_movq_A0_reg(base);
2416 if (disp != 0) {
2417 gen_op_addq_A0_im(disp);
2418 }
2419 } else
2420#endif
2421 {
2422 gen_op_movl_A0_reg(base);
2423 if (disp != 0)
2424 gen_op_addl_A0_im(disp);
2425 }
2426 } else {
2427#ifdef TARGET_X86_64
2428 if (s->aflag == 2) {
2429 gen_op_movq_A0_im(disp);
2430 } else
2431#endif
2432 {
2433 gen_op_movl_A0_im(disp);
2434 }
2435 }
2436 /* XXX: index == 4 is always invalid */
2437 if (havesib && (index != 4 || scale != 0)) {
2438#ifdef TARGET_X86_64
2439 if (s->aflag == 2) {
2440 gen_op_addq_A0_reg_sN(scale, index);
2441 } else
2442#endif
2443 {
2444 gen_op_addl_A0_reg_sN(scale, index);
2445 }
2446 }
2447 if (must_add_seg) {
2448 if (override < 0) {
2449 if (base == R_EBP || base == R_ESP)
2450 override = R_SS;
2451 else
2452 override = R_DS;
2453 }
2454#ifdef TARGET_X86_64
2455 if (s->aflag == 2) {
2456 gen_op_addq_A0_seg(override);
2457 } else
2458#endif
2459 {
2460 gen_op_addl_A0_seg(override);
2461 }
2462 }
2463 } else {
2464 switch (mod) {
2465 case 0:
2466 if (rm == 6) {
2467 disp = lduw_code(s->pc);
2468 s->pc += 2;
2469 gen_op_movl_A0_im(disp);
2470 rm = 0; /* avoid SS override */
2471 goto no_rm;
2472 } else {
2473 disp = 0;
2474 }
2475 break;
2476 case 1:
2477 disp = (int8_t)ldub_code(s->pc++);
2478 break;
2479 default:
2480 case 2:
2481 disp = lduw_code(s->pc);
2482 s->pc += 2;
2483 break;
2484 }
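/* 16-bit effective addresses follow the classic table below:
   rm 0..7 -> BX+SI, BX+DI, BP+SI, BP+DI, SI, DI, BP (disp16 when
   mod == 0), BX; the BP-based forms default to the SS segment. */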
2485 switch(rm) {
2486 case 0:
2487 gen_op_movl_A0_reg(R_EBX);
2488 gen_op_addl_A0_reg_sN(0, R_ESI);
2489 break;
2490 case 1:
2491 gen_op_movl_A0_reg(R_EBX);
2492 gen_op_addl_A0_reg_sN(0, R_EDI);
2493 break;
2494 case 2:
2495 gen_op_movl_A0_reg(R_EBP);
2496 gen_op_addl_A0_reg_sN(0, R_ESI);
2497 break;
2498 case 3:
2499 gen_op_movl_A0_reg(R_EBP);
2500 gen_op_addl_A0_reg_sN(0, R_EDI);
2501 break;
2502 case 4:
2503 gen_op_movl_A0_reg(R_ESI);
2504 break;
2505 case 5:
2506 gen_op_movl_A0_reg(R_EDI);
2507 break;
2508 case 6:
2509 gen_op_movl_A0_reg(R_EBP);
2510 break;
2511 default:
2512 case 7:
2513 gen_op_movl_A0_reg(R_EBX);
2514 break;
2515 }
2516 if (disp != 0)
2517 gen_op_addl_A0_im(disp);
2518 gen_op_andl_A0_ffff();
2519 no_rm:
2520 if (must_add_seg) {
2521 if (override < 0) {
2522 if (rm == 2 || rm == 3 || rm == 6)
2523 override = R_SS;
2524 else
2525 override = R_DS;
2526 }
2527 gen_op_addl_A0_seg(override);
2528 }
2529 }
2530
2531 opreg = OR_A0;
2532 disp = 0;
2533 *reg_ptr = opreg;
2534 *offset_ptr = disp;
2535}
2536
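/* Decode and skip a ModRM byte plus any SIB/displacement without
   generating code; used for hint NOPs (e.g. 0F 1F /0) that still
   consume a full addressing form. */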
2537static void gen_nop_modrm(DisasContext *s, int modrm)
2538{
2539 int mod, rm, base, code;
2540
2541 mod = (modrm >> 6) & 3;
2542 if (mod == 3)
2543 return;
2544 rm = modrm & 7;
2545
2546 if (s->aflag) {
2547
2548 base = rm;
2549
2550 if (base == 4) {
2551 code = ldub_code(s->pc++);
2552 base = (code & 7);
2553 }
2554
2555 switch (mod) {
2556 case 0:
2557 if (base == 5) {
2558 s->pc += 4;
2559 }
2560 break;
2561 case 1:
2562 s->pc++;
2563 break;
2564 default:
2565 case 2:
2566 s->pc += 4;
2567 break;
2568 }
2569 } else {
2570 switch (mod) {
2571 case 0:
2572 if (rm == 6) {
2573 s->pc += 2;
2574 }
2575 break;
2576 case 1:
2577 s->pc++;
2578 break;
2579 default:
2580 case 2:
2581 s->pc += 2;
2582 break;
2583 }
2584 }
2585}
2586
2587/* used for LEA and MOV AX, mem */
2588static void gen_add_A0_ds_seg(DisasContext *s)
2589{
2590 int override, must_add_seg;
2591 must_add_seg = s->addseg;
2592 override = R_DS;
2593 if (s->override >= 0) {
2594 override = s->override;
2595 must_add_seg = 1;
2596 } /* else: override stays at the R_DS default set above */
2599 if (must_add_seg) {
2600#ifdef TARGET_X86_64
2601 if (CODE64(s)) {
2602 gen_op_addq_A0_seg(override);
2603 } else
2604#endif
2605 {
2606 gen_op_addl_A0_seg(override);
2607 }
2608 }
2609}
2610
2611/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2612 OR_TMP0 */
2613static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2614{
2615 int mod, rm, opreg, disp;
2616
2617 mod = (modrm >> 6) & 3;
2618 rm = (modrm & 7) | REX_B(s);
2619 if (mod == 3) {
2620 if (is_store) {
2621 if (reg != OR_TMP0)
2622 gen_op_mov_TN_reg(ot, 0, reg);
2623 gen_op_mov_reg_T0(ot, rm);
2624 } else {
2625 gen_op_mov_TN_reg(ot, 0, rm);
2626 if (reg != OR_TMP0)
2627 gen_op_mov_reg_T0(ot, reg);
2628 }
2629 } else {
2630 gen_lea_modrm(s, modrm, &opreg, &disp);
2631 if (is_store) {
2632 if (reg != OR_TMP0)
2633 gen_op_mov_TN_reg(ot, 0, reg);
2634 gen_op_st_T0_A0(ot + s->mem_index);
2635 } else {
2636 gen_op_ld_T0_A0(ot + s->mem_index);
2637 if (reg != OR_TMP0)
2638 gen_op_mov_reg_T0(ot, reg);
2639 }
2640 }
2641}
2642
2643#ifndef VBOX
2644static inline uint32_t insn_get(DisasContext *s, int ot)
2645#else /* VBOX */
2646DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2647#endif /* VBOX */
2648{
2649 uint32_t ret;
2650
2651 switch(ot) {
2652 case OT_BYTE:
2653 ret = ldub_code(s->pc);
2654 s->pc++;
2655 break;
2656 case OT_WORD:
2657 ret = lduw_code(s->pc);
2658 s->pc += 2;
2659 break;
2660 default:
2661 case OT_LONG:
2662 ret = ldl_code(s->pc);
2663 s->pc += 4;
2664 break;
2665 }
2666 return ret;
2667}
2668
2669#ifndef VBOX
2670static inline int insn_const_size(unsigned int ot)
2671#else /* VBOX */
2672DECLINLINE(int) insn_const_size(unsigned int ot)
2673#endif /* VBOX */
2674{
2675 if (ot <= OT_LONG)
2676 return 1 << ot;
2677 else
2678 return 4;
2679}
2680
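/* Direct TB chaining: tcg_gen_goto_tb emits a patchable jump and
   exit_tb returns ((long)tb + tb_num) so the caller can link this
   block to its successor; chaining is only allowed while the target
   stays on the same guest page, otherwise we fall back to gen_eob(). */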
2681#ifndef VBOX
2682static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2683#else /* VBOX */
2684DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2685#endif /* VBOX */
2686{
2687 TranslationBlock *tb;
2688 target_ulong pc;
2689
2690 pc = s->cs_base + eip;
2691 tb = s->tb;
2692 /* NOTE: we handle the case where the TB spans two pages here */
2693 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2694 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2695#ifdef VBOX
2696 gen_check_external_event(s);
2697#endif /* VBOX */
2698 /* jump to same page: we can use a direct jump */
2699 tcg_gen_goto_tb(tb_num);
2700 gen_jmp_im(eip);
2701 tcg_gen_exit_tb((long)tb + tb_num);
2702 } else {
2703 /* jump to another page: currently not optimized */
2704 gen_jmp_im(eip);
2705 gen_eob(s);
2706 }
2707}
2708
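/* Conditional jump: with jmp_opt both the not-taken and taken paths end
   in gen_goto_tb(0/1, ...) so each successor can be chained separately;
   otherwise both paths just store EIP and leave through one gen_eob(). */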
2709#ifndef VBOX
2710static inline void gen_jcc(DisasContext *s, int b,
2711#else /* VBOX */
2712DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2713#endif /* VBOX */
2714 target_ulong val, target_ulong next_eip)
2715{
2716 int l1, l2, cc_op;
2717
2718 cc_op = s->cc_op;
2719 if (s->cc_op != CC_OP_DYNAMIC) {
2720 gen_op_set_cc_op(s->cc_op);
2721 s->cc_op = CC_OP_DYNAMIC;
2722 }
2723 if (s->jmp_opt) {
2724 l1 = gen_new_label();
2725 gen_jcc1(s, cc_op, b, l1);
2726
2727 gen_goto_tb(s, 0, next_eip);
2728
2729 gen_set_label(l1);
2730 gen_goto_tb(s, 1, val);
2731 s->is_jmp = 3;
2732 } else {
2733
2734 l1 = gen_new_label();
2735 l2 = gen_new_label();
2736 gen_jcc1(s, cc_op, b, l1);
2737
2738 gen_jmp_im(next_eip);
2739 tcg_gen_br(l2);
2740
2741 gen_set_label(l1);
2742 gen_jmp_im(val);
2743 gen_set_label(l2);
2744 gen_eob(s);
2745 }
2746}
2747
2748static void gen_setcc(DisasContext *s, int b)
2749{
2750 int inv, jcc_op, l1;
2751 TCGv t0;
2752
2753 if (is_fast_jcc_case(s, b)) {
2754 /* nominal case: we use a jump */
2755 /* XXX: make it faster by adding new instructions in TCG */
2756 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2757 tcg_gen_movi_tl(t0, 0);
2758 l1 = gen_new_label();
2759 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2760 tcg_gen_movi_tl(t0, 1);
2761 gen_set_label(l1);
2762 tcg_gen_mov_tl(cpu_T[0], t0);
2763 tcg_temp_free(t0);
2764 } else {
2765 /* slow case: it is more efficient not to generate a jump,
2766 although it is questionable whether this optimization
2767 is worthwhile */
2768 inv = b & 1;
2769 jcc_op = (b >> 1) & 7;
2770 gen_setcc_slow_T0(s, jcc_op);
2771 if (inv) {
2772 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2773 }
2774 }
2775}
2776
2777#ifndef VBOX
2778static inline void gen_op_movl_T0_seg(int seg_reg)
2779#else /* VBOX */
2780DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2781#endif /* VBOX */
2782{
2783 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2784 offsetof(CPUX86State,segs[seg_reg].selector));
2785}
2786
2787#ifndef VBOX
2788static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2789#else /* VBOX */
2790DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2791#endif /* VBOX */
2792{
2793 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2794 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2795 offsetof(CPUX86State,segs[seg_reg].selector));
2796 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2797 tcg_gen_st_tl(cpu_T[0], cpu_env,
2798 offsetof(CPUX86State,segs[seg_reg].base));
2799#ifdef VBOX
2800 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2801 if (seg_reg == R_CS)
2802 flags |= DESC_CS_MASK;
2803 gen_op_movl_T0_im(flags);
2804 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2805
2806 /* Set the limit to 0xffff. */
2807 gen_op_movl_T0_im(0xffff);
2808 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2809#endif
2810}
2811
2812 /* move T0 to seg_reg and determine whether the CPU state may change.
2813 Never call this function with seg_reg == R_CS */
2814static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2815{
2816 if (s->pe && !s->vm86) {
2817 /* XXX: optimize by finding processor state dynamically */
2818 if (s->cc_op != CC_OP_DYNAMIC)
2819 gen_op_set_cc_op(s->cc_op);
2820 gen_jmp_im(cur_eip);
2821 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2822 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2823 /* abort translation because the addseg value may change or
2824 because ss32 may change. For R_SS, translation must always
2825 stop as a special handling must be done to disable hardware
2826 interrupts for the next instruction */
2827 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2828 s->is_jmp = 3;
2829 } else {
2830 gen_op_movl_seg_T0_vm(seg_reg);
2831 if (seg_reg == R_SS)
2832 s->is_jmp = 3;
2833 }
2834}
2835
2836#ifndef VBOX
2837static inline int svm_is_rep(int prefixes)
2838#else /* VBOX */
2839DECLINLINE(int) svm_is_rep(int prefixes)
2840#endif /* VBOX */
2841{
2842 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2843}
2844
2845#ifndef VBOX
2846static inline void
2847#else /* VBOX */
2848DECLINLINE(void)
2849#endif /* VBOX */
2850gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2851 uint32_t type, uint64_t param)
2852{
2853 /* no SVM activated; fast case */
2854 if (likely(!(s->flags & HF_SVMI_MASK)))
2855 return;
2856 if (s->cc_op != CC_OP_DYNAMIC)
2857 gen_op_set_cc_op(s->cc_op);
2858 gen_jmp_im(pc_start - s->cs_base);
2859 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2860 tcg_const_i32(type), tcg_const_i64(param));
2861}
2862
2863#ifndef VBOX
2864static inline void
2865#else /* VBOX */
2866DECLINLINE(void)
2867#endif
2868gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2869{
2870 gen_svm_check_intercept_param(s, pc_start, type, 0);
2871}
2872
2873#ifndef VBOX
2874static inline void gen_stack_update(DisasContext *s, int addend)
2875#else /* VBOX */
2876DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2877#endif /* VBOX */
2878{
2879#ifdef TARGET_X86_64
2880 if (CODE64(s)) {
2881 gen_op_add_reg_im(2, R_ESP, addend);
2882 } else
2883#endif
2884 if (s->ss32) {
2885 gen_op_add_reg_im(1, R_ESP, addend);
2886 } else {
2887 gen_op_add_reg_im(0, R_ESP, addend);
2888 }
2889}
2890
2891/* generate a push. It depends on ss32, addseg and dflag */
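/* The "s->dflag + 1" idiom maps dflag 0/1 to OT_WORD/OT_LONG, so the
   non-64-bit path below stores 2 or 4 bytes under the old stack top. */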
2892static void gen_push_T0(DisasContext *s)
2893{
2894#ifdef TARGET_X86_64
2895 if (CODE64(s)) {
2896 gen_op_movq_A0_reg(R_ESP);
2897 if (s->dflag) {
2898 gen_op_addq_A0_im(-8);
2899 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2900 } else {
2901 gen_op_addq_A0_im(-2);
2902 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2903 }
2904 gen_op_mov_reg_A0(2, R_ESP);
2905 } else
2906#endif
2907 {
2908 gen_op_movl_A0_reg(R_ESP);
2909 if (!s->dflag)
2910 gen_op_addl_A0_im(-2);
2911 else
2912 gen_op_addl_A0_im(-4);
2913 if (s->ss32) {
2914 if (s->addseg) {
2915 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2916 gen_op_addl_A0_seg(R_SS);
2917 }
2918 } else {
2919 gen_op_andl_A0_ffff();
2920 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2921 gen_op_addl_A0_seg(R_SS);
2922 }
2923 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2924 if (s->ss32 && !s->addseg)
2925 gen_op_mov_reg_A0(1, R_ESP);
2926 else
2927 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2928 }
2929}
2930
2931/* generate a push. It depends on ss32, addseg and dflag */
2932/* slower version for T1, only used for call Ev */
2933static void gen_push_T1(DisasContext *s)
2934{
2935#ifdef TARGET_X86_64
2936 if (CODE64(s)) {
2937 gen_op_movq_A0_reg(R_ESP);
2938 if (s->dflag) {
2939 gen_op_addq_A0_im(-8);
2940 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2941 } else {
2942 gen_op_addq_A0_im(-2);
2943 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2944 }
2945 gen_op_mov_reg_A0(2, R_ESP);
2946 } else
2947#endif
2948 {
2949 gen_op_movl_A0_reg(R_ESP);
2950 if (!s->dflag)
2951 gen_op_addl_A0_im(-2);
2952 else
2953 gen_op_addl_A0_im(-4);
2954 if (s->ss32) {
2955 if (s->addseg) {
2956 gen_op_addl_A0_seg(R_SS);
2957 }
2958 } else {
2959 gen_op_andl_A0_ffff();
2960 gen_op_addl_A0_seg(R_SS);
2961 }
2962 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2963
2964 if (s->ss32 && !s->addseg)
2965 gen_op_mov_reg_A0(1, R_ESP);
2966 else
2967 gen_stack_update(s, (-2) << s->dflag);
2968 }
2969}
2970
2971/* two step pop is necessary for precise exceptions */
2972static void gen_pop_T0(DisasContext *s)
2973{
2974#ifdef TARGET_X86_64
2975 if (CODE64(s)) {
2976 gen_op_movq_A0_reg(R_ESP);
2977 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2978 } else
2979#endif
2980 {
2981 gen_op_movl_A0_reg(R_ESP);
2982 if (s->ss32) {
2983 if (s->addseg)
2984 gen_op_addl_A0_seg(R_SS);
2985 } else {
2986 gen_op_andl_A0_ffff();
2987 gen_op_addl_A0_seg(R_SS);
2988 }
2989 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2990 }
2991}
2992
2993static void gen_pop_update(DisasContext *s)
2994{
2995#ifdef TARGET_X86_64
2996 if (CODE64(s) && s->dflag) {
2997 gen_stack_update(s, 8);
2998 } else
2999#endif
3000 {
3001 gen_stack_update(s, 2 << s->dflag);
3002 }
3003}
3004
3005static void gen_stack_A0(DisasContext *s)
3006{
3007 gen_op_movl_A0_reg(R_ESP);
3008 if (!s->ss32)
3009 gen_op_andl_A0_ffff();
3010 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3011 if (s->addseg)
3012 gen_op_addl_A0_seg(R_SS);
3013}
3014
3015 /* NOTE: 16-bit wrap-around is not fully handled */
3016static void gen_pusha(DisasContext *s)
3017{
3018 int i;
3019 gen_op_movl_A0_reg(R_ESP);
3020 gen_op_addl_A0_im(-16 << s->dflag);
3021 if (!s->ss32)
3022 gen_op_andl_A0_ffff();
3023 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3024 if (s->addseg)
3025 gen_op_addl_A0_seg(R_SS);
3026 for(i = 0;i < 8; i++) {
3027 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3028 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3029 gen_op_addl_A0_im(2 << s->dflag);
3030 }
3031 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3032}
3033
3034 /* NOTE: 16-bit wrap-around is not fully handled */
3035static void gen_popa(DisasContext *s)
3036{
3037 int i;
3038 gen_op_movl_A0_reg(R_ESP);
3039 if (!s->ss32)
3040 gen_op_andl_A0_ffff();
3041 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3042 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3043 if (s->addseg)
3044 gen_op_addl_A0_seg(R_SS);
3045 for(i = 0;i < 8; i++) {
3046 /* ESP is not reloaded */
3047 if (i != 3) {
3048 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3049 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3050 }
3051 gen_op_addl_A0_im(2 << s->dflag);
3052 }
3053 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3054}
3055
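/* ENTER: reserve esp_addend bytes of frame, push the old frame pointer
   and, for level > 0 (masked to 5 bits per the ISA), let the
   enter_level helpers copy the outer frame pointers. */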
3056static void gen_enter(DisasContext *s, int esp_addend, int level)
3057{
3058 int ot, opsize;
3059
3060 level &= 0x1f;
3061#ifdef TARGET_X86_64
3062 if (CODE64(s)) {
3063 ot = s->dflag ? OT_QUAD : OT_WORD;
3064 opsize = 1 << ot;
3065
3066 gen_op_movl_A0_reg(R_ESP);
3067 gen_op_addq_A0_im(-opsize);
3068 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3069
3070 /* push bp */
3071 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3072 gen_op_st_T0_A0(ot + s->mem_index);
3073 if (level) {
3074 /* XXX: must save state */
3075 tcg_gen_helper_0_3(helper_enter64_level,
3076 tcg_const_i32(level),
3077 tcg_const_i32((ot == OT_QUAD)),
3078 cpu_T[1]);
3079 }
3080 gen_op_mov_reg_T1(ot, R_EBP);
3081 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3082 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3083 } else
3084#endif
3085 {
3086 ot = s->dflag + OT_WORD;
3087 opsize = 2 << s->dflag;
3088
3089 gen_op_movl_A0_reg(R_ESP);
3090 gen_op_addl_A0_im(-opsize);
3091 if (!s->ss32)
3092 gen_op_andl_A0_ffff();
3093 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3094 if (s->addseg)
3095 gen_op_addl_A0_seg(R_SS);
3096 /* push bp */
3097 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3098 gen_op_st_T0_A0(ot + s->mem_index);
3099 if (level) {
3100 /* XXX: must save state */
3101 tcg_gen_helper_0_3(helper_enter_level,
3102 tcg_const_i32(level),
3103 tcg_const_i32(s->dflag),
3104 cpu_T[1]);
3105 }
3106 gen_op_mov_reg_T1(ot, R_EBP);
3107 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3108 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3109 }
3110}
3111
3112static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3113{
3114 if (s->cc_op != CC_OP_DYNAMIC)
3115 gen_op_set_cc_op(s->cc_op);
3116 gen_jmp_im(cur_eip);
3117 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3118 s->is_jmp = 3;
3119}
3120
3121/* an interrupt is different from an exception because of the
3122 privilege checks */
3123static void gen_interrupt(DisasContext *s, int intno,
3124 target_ulong cur_eip, target_ulong next_eip)
3125{
3126 if (s->cc_op != CC_OP_DYNAMIC)
3127 gen_op_set_cc_op(s->cc_op);
3128 gen_jmp_im(cur_eip);
3129 tcg_gen_helper_0_2(helper_raise_interrupt,
3130 tcg_const_i32(intno),
3131 tcg_const_i32(next_eip - cur_eip));
3132 s->is_jmp = 3;
3133}
3134
3135static void gen_debug(DisasContext *s, target_ulong cur_eip)
3136{
3137 if (s->cc_op != CC_OP_DYNAMIC)
3138 gen_op_set_cc_op(s->cc_op);
3139 gen_jmp_im(cur_eip);
3140 tcg_gen_helper_0_0(helper_debug);
3141 s->is_jmp = 3;
3142}
3143
3144/* generate a generic end of block. Trace exception is also generated
3145 if needed */
3146static void gen_eob(DisasContext *s)
3147{
3148#ifdef VBOX
3149 gen_check_external_event(s);
3150#endif /* VBOX */
3151 if (s->cc_op != CC_OP_DYNAMIC)
3152 gen_op_set_cc_op(s->cc_op);
3153 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3154 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3155 }
3156 if (s->singlestep_enabled) {
3157 tcg_gen_helper_0_0(helper_debug);
3158 } else if (s->tf) {
3159 tcg_gen_helper_0_0(helper_single_step);
3160 } else {
3161 tcg_gen_exit_tb(0);
3162 }
3163 s->is_jmp = 3;
3164}
3165
3166 /* generate a jump to eip. No segment change may happen before this,
3167 as a direct jump to the next block may occur */
3168static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3169{
3170 if (s->jmp_opt) {
3171 if (s->cc_op != CC_OP_DYNAMIC) {
3172 gen_op_set_cc_op(s->cc_op);
3173 s->cc_op = CC_OP_DYNAMIC;
3174 }
3175 gen_goto_tb(s, tb_num, eip);
3176 s->is_jmp = 3;
3177 } else {
3178 gen_jmp_im(eip);
3179 gen_eob(s);
3180 }
3181}
3182
3183static void gen_jmp(DisasContext *s, target_ulong eip)
3184{
3185 gen_jmp_tb(s, eip, 0);
3186}
3187
3188#ifndef VBOX
3189static inline void gen_ldq_env_A0(int idx, int offset)
3190#else /* VBOX */
3191DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3192#endif /* VBOX */
3193{
3194 int mem_index = (idx >> 2) - 1;
3195 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3196 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3197}
3198
3199#ifndef VBOX
3200static inline void gen_stq_env_A0(int idx, int offset)
3201#else /* VBOX */
3202DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3203#endif /* VBOX */
3204{
3205 int mem_index = (idx >> 2) - 1;
3206 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3207 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3208}
3209
3210#ifndef VBOX
3211static inline void gen_ldo_env_A0(int idx, int offset)
3212#else /* VBOX */
3213DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3214#endif /* VBOX */
3215{
3216 int mem_index = (idx >> 2) - 1;
3217 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3218 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3219 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3220 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3221 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3222}
3223
3224#ifndef VBOX
3225static inline void gen_sto_env_A0(int idx, int offset)
3226#else /* VBOX */
3227DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3228#endif /* VBOX */
3229{
3230 int mem_index = (idx >> 2) - 1;
3231 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3232 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3233 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3234 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3235 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3236}
3237
3238#ifndef VBOX
3239static inline void gen_op_movo(int d_offset, int s_offset)
3240#else /* VBOX */
3241DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3242#endif /* VBOX */
3243{
3244 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3245 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3246 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3247 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3248}
3249
3250#ifndef VBOX
3251static inline void gen_op_movq(int d_offset, int s_offset)
3252#else /* VBOX */
3253DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3254#endif /* VBOX */
3255{
3256 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3257 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3258}
3259
3260#ifndef VBOX
3261static inline void gen_op_movl(int d_offset, int s_offset)
3262#else /* VBOX */
3263DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3264#endif /* VBOX */
3265{
3266 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3267 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3268}
3269
3270#ifndef VBOX
3271static inline void gen_op_movq_env_0(int d_offset)
3272#else /* VBOX */
3273DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3274#endif /* VBOX */
3275{
3276 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3277 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3278}
3279
3280#define SSE_SPECIAL ((void *)1)
3281#define SSE_DUMMY ((void *)2)
3282
3283#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3284#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3285 helper_ ## x ## ss, helper_ ## x ## sd, }
3286
3287static void *sse_op_table1[256][4] = {
3288 /* 3DNow! extensions */
3289 [0x0e] = { SSE_DUMMY }, /* femms */
3290 [0x0f] = { SSE_DUMMY }, /* pf... */
3291 /* pure SSE operations */
3292 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3293 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3294 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3295 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3296 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3297 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3298 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3299 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3300
3301 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3302 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3303 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3304 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3305 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3306 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3307 [0x2e] = { helper_ucomiss, helper_ucomisd },
3308 [0x2f] = { helper_comiss, helper_comisd },
3309 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3310 [0x51] = SSE_FOP(sqrt),
3311 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3312 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3313 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3314 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3315 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3316 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3317 [0x58] = SSE_FOP(add),
3318 [0x59] = SSE_FOP(mul),
3319 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3320 helper_cvtss2sd, helper_cvtsd2ss },
3321 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3322 [0x5c] = SSE_FOP(sub),
3323 [0x5d] = SSE_FOP(min),
3324 [0x5e] = SSE_FOP(div),
3325 [0x5f] = SSE_FOP(max),
3326
3327 [0xc2] = SSE_FOP(cmpeq),
3328 [0xc6] = { helper_shufps, helper_shufpd },
3329
3330 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3331 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3332
3333 /* MMX ops and their SSE extensions */
3334 [0x60] = MMX_OP2(punpcklbw),
3335 [0x61] = MMX_OP2(punpcklwd),
3336 [0x62] = MMX_OP2(punpckldq),
3337 [0x63] = MMX_OP2(packsswb),
3338 [0x64] = MMX_OP2(pcmpgtb),
3339 [0x65] = MMX_OP2(pcmpgtw),
3340 [0x66] = MMX_OP2(pcmpgtl),
3341 [0x67] = MMX_OP2(packuswb),
3342 [0x68] = MMX_OP2(punpckhbw),
3343 [0x69] = MMX_OP2(punpckhwd),
3344 [0x6a] = MMX_OP2(punpckhdq),
3345 [0x6b] = MMX_OP2(packssdw),
3346 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3347 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3348 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3349 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3350 [0x70] = { helper_pshufw_mmx,
3351 helper_pshufd_xmm,
3352 helper_pshufhw_xmm,
3353 helper_pshuflw_xmm },
3354 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3355 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3356 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3357 [0x74] = MMX_OP2(pcmpeqb),
3358 [0x75] = MMX_OP2(pcmpeqw),
3359 [0x76] = MMX_OP2(pcmpeql),
3360 [0x77] = { SSE_DUMMY }, /* emms */
3361 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3362 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3363 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3364 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3365 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3366 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3367 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3368 [0xd1] = MMX_OP2(psrlw),
3369 [0xd2] = MMX_OP2(psrld),
3370 [0xd3] = MMX_OP2(psrlq),
3371 [0xd4] = MMX_OP2(paddq),
3372 [0xd5] = MMX_OP2(pmullw),
3373 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
3374 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3375 [0xd8] = MMX_OP2(psubusb),
3376 [0xd9] = MMX_OP2(psubusw),
3377 [0xda] = MMX_OP2(pminub),
3378 [0xdb] = MMX_OP2(pand),
3379 [0xdc] = MMX_OP2(paddusb),
3380 [0xdd] = MMX_OP2(paddusw),
3381 [0xde] = MMX_OP2(pmaxub),
3382 [0xdf] = MMX_OP2(pandn),
3383 [0xe0] = MMX_OP2(pavgb),
3384 [0xe1] = MMX_OP2(psraw),
3385 [0xe2] = MMX_OP2(psrad),
3386 [0xe3] = MMX_OP2(pavgw),
3387 [0xe4] = MMX_OP2(pmulhuw),
3388 [0xe5] = MMX_OP2(pmulhw),
3389 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3390 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3391 [0xe8] = MMX_OP2(psubsb),
3392 [0xe9] = MMX_OP2(psubsw),
3393 [0xea] = MMX_OP2(pminsw),
3394 [0xeb] = MMX_OP2(por),
3395 [0xec] = MMX_OP2(paddsb),
3396 [0xed] = MMX_OP2(paddsw),
3397 [0xee] = MMX_OP2(pmaxsw),
3398 [0xef] = MMX_OP2(pxor),
3399 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3400 [0xf1] = MMX_OP2(psllw),
3401 [0xf2] = MMX_OP2(pslld),
3402 [0xf3] = MMX_OP2(psllq),
3403 [0xf4] = MMX_OP2(pmuludq),
3404 [0xf5] = MMX_OP2(pmaddwd),
3405 [0xf6] = MMX_OP2(psadbw),
3406 [0xf7] = MMX_OP2(maskmov),
3407 [0xf8] = MMX_OP2(psubb),
3408 [0xf9] = MMX_OP2(psubw),
3409 [0xfa] = MMX_OP2(psubl),
3410 [0xfb] = MMX_OP2(psubq),
3411 [0xfc] = MMX_OP2(paddb),
3412 [0xfd] = MMX_OP2(paddw),
3413 [0xfe] = MMX_OP2(paddl),
3414};
3415
3416static void *sse_op_table2[3 * 8][2] = {
3417 [0 + 2] = MMX_OP2(psrlw),
3418 [0 + 4] = MMX_OP2(psraw),
3419 [0 + 6] = MMX_OP2(psllw),
3420 [8 + 2] = MMX_OP2(psrld),
3421 [8 + 4] = MMX_OP2(psrad),
3422 [8 + 6] = MMX_OP2(pslld),
3423 [16 + 2] = MMX_OP2(psrlq),
3424 [16 + 3] = { NULL, helper_psrldq_xmm },
3425 [16 + 6] = MMX_OP2(psllq),
3426 [16 + 7] = { NULL, helper_pslldq_xmm },
3427};
3428
3429static void *sse_op_table3[4 * 3] = {
3430 helper_cvtsi2ss,
3431 helper_cvtsi2sd,
3432 X86_64_ONLY(helper_cvtsq2ss),
3433 X86_64_ONLY(helper_cvtsq2sd),
3434
3435 helper_cvttss2si,
3436 helper_cvttsd2si,
3437 X86_64_ONLY(helper_cvttss2sq),
3438 X86_64_ONLY(helper_cvttsd2sq),
3439
3440 helper_cvtss2si,
3441 helper_cvtsd2si,
3442 X86_64_ONLY(helper_cvtss2sq),
3443 X86_64_ONLY(helper_cvtsd2sq),
3444};
3445
3446static void *sse_op_table4[8][4] = {
3447 SSE_FOP(cmpeq),
3448 SSE_FOP(cmplt),
3449 SSE_FOP(cmple),
3450 SSE_FOP(cmpunord),
3451 SSE_FOP(cmpneq),
3452 SSE_FOP(cmpnlt),
3453 SSE_FOP(cmpnle),
3454 SSE_FOP(cmpord),
3455};
3456
3457static void *sse_op_table5[256] = {
3458 [0x0c] = helper_pi2fw,
3459 [0x0d] = helper_pi2fd,
3460 [0x1c] = helper_pf2iw,
3461 [0x1d] = helper_pf2id,
3462 [0x8a] = helper_pfnacc,
3463 [0x8e] = helper_pfpnacc,
3464 [0x90] = helper_pfcmpge,
3465 [0x94] = helper_pfmin,
3466 [0x96] = helper_pfrcp,
3467 [0x97] = helper_pfrsqrt,
3468 [0x9a] = helper_pfsub,
3469 [0x9e] = helper_pfadd,
3470 [0xa0] = helper_pfcmpgt,
3471 [0xa4] = helper_pfmax,
3472 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3473 [0xa7] = helper_movq, /* pfrsqit1 */
3474 [0xaa] = helper_pfsubr,
3475 [0xae] = helper_pfacc,
3476 [0xb0] = helper_pfcmpeq,
3477 [0xb4] = helper_pfmul,
3478 [0xb6] = helper_movq, /* pfrcpit2 */
3479 [0xb7] = helper_pmulhrw_mmx,
3480 [0xbb] = helper_pswapd,
3481 [0xbf] = helper_pavgb_mmx /* pavgusb */
3482};
3483
3484struct sse_op_helper_s {
3485 void *op[2]; uint32_t ext_mask;
3486};
3487#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3488#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3489#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3490#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
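/* op[0] is the MMX form, op[1] the XMM form; the instruction faults as
   illegal unless ext_mask is present in cpuid_ext_features. */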
3491static struct sse_op_helper_s sse_op_table6[256] = {
3492 [0x00] = SSSE3_OP(pshufb),
3493 [0x01] = SSSE3_OP(phaddw),
3494 [0x02] = SSSE3_OP(phaddd),
3495 [0x03] = SSSE3_OP(phaddsw),
3496 [0x04] = SSSE3_OP(pmaddubsw),
3497 [0x05] = SSSE3_OP(phsubw),
3498 [0x06] = SSSE3_OP(phsubd),
3499 [0x07] = SSSE3_OP(phsubsw),
3500 [0x08] = SSSE3_OP(psignb),
3501 [0x09] = SSSE3_OP(psignw),
3502 [0x0a] = SSSE3_OP(psignd),
3503 [0x0b] = SSSE3_OP(pmulhrsw),
3504 [0x10] = SSE41_OP(pblendvb),
3505 [0x14] = SSE41_OP(blendvps),
3506 [0x15] = SSE41_OP(blendvpd),
3507 [0x17] = SSE41_OP(ptest),
3508 [0x1c] = SSSE3_OP(pabsb),
3509 [0x1d] = SSSE3_OP(pabsw),
3510 [0x1e] = SSSE3_OP(pabsd),
3511 [0x20] = SSE41_OP(pmovsxbw),
3512 [0x21] = SSE41_OP(pmovsxbd),
3513 [0x22] = SSE41_OP(pmovsxbq),
3514 [0x23] = SSE41_OP(pmovsxwd),
3515 [0x24] = SSE41_OP(pmovsxwq),
3516 [0x25] = SSE41_OP(pmovsxdq),
3517 [0x28] = SSE41_OP(pmuldq),
3518 [0x29] = SSE41_OP(pcmpeqq),
3519 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3520 [0x2b] = SSE41_OP(packusdw),
3521 [0x30] = SSE41_OP(pmovzxbw),
3522 [0x31] = SSE41_OP(pmovzxbd),
3523 [0x32] = SSE41_OP(pmovzxbq),
3524 [0x33] = SSE41_OP(pmovzxwd),
3525 [0x34] = SSE41_OP(pmovzxwq),
3526 [0x35] = SSE41_OP(pmovzxdq),
3527 [0x37] = SSE42_OP(pcmpgtq),
3528 [0x38] = SSE41_OP(pminsb),
3529 [0x39] = SSE41_OP(pminsd),
3530 [0x3a] = SSE41_OP(pminuw),
3531 [0x3b] = SSE41_OP(pminud),
3532 [0x3c] = SSE41_OP(pmaxsb),
3533 [0x3d] = SSE41_OP(pmaxsd),
3534 [0x3e] = SSE41_OP(pmaxuw),
3535 [0x3f] = SSE41_OP(pmaxud),
3536 [0x40] = SSE41_OP(pmulld),
3537 [0x41] = SSE41_OP(phminposuw),
3538};
3539
3540static struct sse_op_helper_s sse_op_table7[256] = {
3541 [0x08] = SSE41_OP(roundps),
3542 [0x09] = SSE41_OP(roundpd),
3543 [0x0a] = SSE41_OP(roundss),
3544 [0x0b] = SSE41_OP(roundsd),
3545 [0x0c] = SSE41_OP(blendps),
3546 [0x0d] = SSE41_OP(blendpd),
3547 [0x0e] = SSE41_OP(pblendw),
3548 [0x0f] = SSSE3_OP(palignr),
3549 [0x14] = SSE41_SPECIAL, /* pextrb */
3550 [0x15] = SSE41_SPECIAL, /* pextrw */
3551 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3552 [0x17] = SSE41_SPECIAL, /* extractps */
3553 [0x20] = SSE41_SPECIAL, /* pinsrb */
3554 [0x21] = SSE41_SPECIAL, /* insertps */
3555 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3556 [0x40] = SSE41_OP(dpps),
3557 [0x41] = SSE41_OP(dppd),
3558 [0x42] = SSE41_OP(mpsadbw),
3559 [0x60] = SSE42_OP(pcmpestrm),
3560 [0x61] = SSE42_OP(pcmpestri),
3561 [0x62] = SSE42_OP(pcmpistrm),
3562 [0x63] = SSE42_OP(pcmpistri),
3563};
3564
3565static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3566{
3567 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3568 int modrm, mod, rm, reg, reg_addr, offset_addr;
3569 void *sse_op2;
3570
3571 b &= 0xff;
3572 if (s->prefix & PREFIX_DATA)
3573 b1 = 1;
3574 else if (s->prefix & PREFIX_REPZ)
3575 b1 = 2;
3576 else if (s->prefix & PREFIX_REPNZ)
3577 b1 = 3;
3578 else
3579 b1 = 0;
3580 sse_op2 = sse_op_table1[b][b1];
3581 if (!sse_op2)
3582 goto illegal_op;
3583 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3584 is_xmm = 1;
3585 } else {
3586 if (b1 == 0) {
3587 /* MMX case */
3588 is_xmm = 0;
3589 } else {
3590 is_xmm = 1;
3591 }
3592 }
3593 /* simple MMX/SSE operation */
3594 if (s->flags & HF_TS_MASK) {
3595 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3596 return;
3597 }
3598 if (s->flags & HF_EM_MASK) {
3599 illegal_op:
3600 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3601 return;
3602 }
3603 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3604 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3605 goto illegal_op;
3606 if (b == 0x0e) {
3607 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3608 goto illegal_op;
3609 /* femms */
3610 tcg_gen_helper_0_0(helper_emms);
3611 return;
3612 }
3613 if (b == 0x77) {
3614 /* emms */
3615 tcg_gen_helper_0_0(helper_emms);
3616 return;
3617 }
3618 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3619 the static cpu state) */
3620 if (!is_xmm) {
3621 tcg_gen_helper_0_0(helper_enter_mmx);
3622 }
3623
3624 modrm = ldub_code(s->pc++);
3625 reg = ((modrm >> 3) & 7);
3626 if (is_xmm)
3627 reg |= rex_r;
3628 mod = (modrm >> 6) & 3;
3629 if (sse_op2 == SSE_SPECIAL) {
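/* Fold the prefix selector into the opcode so the case labels below
   encode both, e.g. 0x1e7 = 66 0F E7 (movntdq). */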
3630 b |= (b1 << 8);
3631 switch(b) {
3632 case 0x0e7: /* movntq */
3633 if (mod == 3)
3634 goto illegal_op;
3635 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3636 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3637 break;
3638 case 0x1e7: /* movntdq */
3639 case 0x02b: /* movntps */
3640 case 0x12b: /* movntpd */
3641 case 0x3f0: /* lddqu */
3642 if (mod == 3)
3643 goto illegal_op;
3644 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3645 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3646 break;
3647 case 0x6e: /* movd mm, ea */
3648#ifdef TARGET_X86_64
3649 if (s->dflag == 2) {
3650 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3651 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3652 } else
3653#endif
3654 {
3655 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3656 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3657 offsetof(CPUX86State,fpregs[reg].mmx));
3658 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3659 }
3660 break;
3661 case 0x16e: /* movd xmm, ea */
3662#ifdef TARGET_X86_64
3663 if (s->dflag == 2) {
3664 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3665 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3666 offsetof(CPUX86State,xmm_regs[reg]));
3667 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3668 } else
3669#endif
3670 {
3671 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3672 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3673 offsetof(CPUX86State,xmm_regs[reg]));
3674 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3675 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3676 }
3677 break;
3678 case 0x6f: /* movq mm, ea */
3679 if (mod != 3) {
3680 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3681 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3682 } else {
3683 rm = (modrm & 7);
3684 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3685 offsetof(CPUX86State,fpregs[rm].mmx));
3686 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3687 offsetof(CPUX86State,fpregs[reg].mmx));
3688 }
3689 break;
3690 case 0x010: /* movups */
3691 case 0x110: /* movupd */
3692 case 0x028: /* movaps */
3693 case 0x128: /* movapd */
3694 case 0x16f: /* movdqa xmm, ea */
3695 case 0x26f: /* movdqu xmm, ea */
3696 if (mod != 3) {
3697 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3698 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3699 } else {
3700 rm = (modrm & 7) | REX_B(s);
3701 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3702 offsetof(CPUX86State,xmm_regs[rm]));
3703 }
3704 break;
3705 case 0x210: /* movss xmm, ea */
3706 if (mod != 3) {
3707 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3708 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3709 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3710 gen_op_movl_T0_0();
3711 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3712 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3713 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3714 } else {
3715 rm = (modrm & 7) | REX_B(s);
3716 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3717 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3718 }
3719 break;
3720 case 0x310: /* movsd xmm, ea */
3721 if (mod != 3) {
3722 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3723 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3724 gen_op_movl_T0_0();
3725 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3726 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3727 } else {
3728 rm = (modrm & 7) | REX_B(s);
3729 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3730 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3731 }
3732 break;
3733 case 0x012: /* movlps */
3734 case 0x112: /* movlpd */
3735 if (mod != 3) {
3736 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3737 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3738 } else {
3739 /* movhlps */
3740 rm = (modrm & 7) | REX_B(s);
3741 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3742 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3743 }
3744 break;
3745 case 0x212: /* movsldup */
3746 if (mod != 3) {
3747 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3748 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3749 } else {
3750 rm = (modrm & 7) | REX_B(s);
3751 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3752 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3753 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3754 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3755 }
3756 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3757 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3758 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3759 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3760 break;
3761 case 0x312: /* movddup */
3762 if (mod != 3) {
3763 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3764 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3765 } else {
3766 rm = (modrm & 7) | REX_B(s);
3767 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3768 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3769 }
3770 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3771 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3772 break;
3773 case 0x016: /* movhps */
3774 case 0x116: /* movhpd */
3775 if (mod != 3) {
3776 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3777 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3778 } else {
3779 /* movlhps */
3780 rm = (modrm & 7) | REX_B(s);
3781 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3782 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3783 }
3784 break;
3785 case 0x216: /* movshdup */
3786 if (mod != 3) {
3787 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3788 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3789 } else {
3790 rm = (modrm & 7) | REX_B(s);
3791 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3792 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3793 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3794 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3795 }
3796 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3797 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3798 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3799 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3800 break;
3801 case 0x7e: /* movd ea, mm */
3802#ifdef TARGET_X86_64
3803 if (s->dflag == 2) {
3804 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3805 offsetof(CPUX86State,fpregs[reg].mmx));
3806 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3807 } else
3808#endif
3809 {
3810 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3811 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3812 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3813 }
3814 break;
3815 case 0x17e: /* movd ea, xmm */
3816#ifdef TARGET_X86_64
3817 if (s->dflag == 2) {
3818 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3819 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3820 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3821 } else
3822#endif
3823 {
3824 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3825 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3826 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3827 }
3828 break;
3829 case 0x27e: /* movq xmm, ea */
3830 if (mod != 3) {
3831 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3832 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3833 } else {
3834 rm = (modrm & 7) | REX_B(s);
3835 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3836 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3837 }
3838 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3839 break;
3840 case 0x7f: /* movq ea, mm */
3841 if (mod != 3) {
3842 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3843 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3844 } else {
3845 rm = (modrm & 7);
3846 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3847 offsetof(CPUX86State,fpregs[reg].mmx));
3848 }
3849 break;
3850 case 0x011: /* movups */
3851 case 0x111: /* movupd */
3852 case 0x029: /* movaps */
3853 case 0x129: /* movapd */
3854 case 0x17f: /* movdqa ea, xmm */
3855 case 0x27f: /* movdqu ea, xmm */
3856 if (mod != 3) {
3857 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3858 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3859 } else {
3860 rm = (modrm & 7) | REX_B(s);
3861 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3862 offsetof(CPUX86State,xmm_regs[reg]));
3863 }
3864 break;
3865 case 0x211: /* movss ea, xmm */
3866 if (mod != 3) {
3867 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3868 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3869 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3870 } else {
3871 rm = (modrm & 7) | REX_B(s);
3872 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3873 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3874 }
3875 break;
3876 case 0x311: /* movsd ea, xmm */
3877 if (mod != 3) {
3878 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3879 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3880 } else {
3881 rm = (modrm & 7) | REX_B(s);
3882 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3883 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3884 }
3885 break;
3886 case 0x013: /* movlps */
3887 case 0x113: /* movlpd */
3888 if (mod != 3) {
3889 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3890 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3891 } else {
3892 goto illegal_op;
3893 }
3894 break;
3895 case 0x017: /* movhps */
3896 case 0x117: /* movhpd */
3897 if (mod != 3) {
3898 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3899 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3900 } else {
3901 goto illegal_op;
3902 }
3903 break;
3904 case 0x71: /* shift mm, im */
3905 case 0x72:
3906 case 0x73:
3907 case 0x171: /* shift xmm, im */
3908 case 0x172:
3909 case 0x173:
3910 val = ldub_code(s->pc++);
3911 if (is_xmm) {
3912 gen_op_movl_T0_im(val);
3913 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3914 gen_op_movl_T0_0();
3915 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3916 op1_offset = offsetof(CPUX86State,xmm_t0);
3917 } else {
3918 gen_op_movl_T0_im(val);
3919 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3920 gen_op_movl_T0_0();
3921 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3922 op1_offset = offsetof(CPUX86State,mmx_t0);
3923 }
3924 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3925 if (!sse_op2)
3926 goto illegal_op;
3927 if (is_xmm) {
3928 rm = (modrm & 7) | REX_B(s);
3929 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3930 } else {
3931 rm = (modrm & 7);
3932 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3933 }
3934 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3935 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3936 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3937 break;
3938 case 0x050: /* movmskps */
3939 rm = (modrm & 7) | REX_B(s);
3940 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3941 offsetof(CPUX86State,xmm_regs[rm]));
3942 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3943 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3944 gen_op_mov_reg_T0(OT_LONG, reg);
3945 break;
3946 case 0x150: /* movmskpd */
3947 rm = (modrm & 7) | REX_B(s);
3948 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3949 offsetof(CPUX86State,xmm_regs[rm]));
3950 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3951 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3952 gen_op_mov_reg_T0(OT_LONG, reg);
3953 break;
3954 case 0x02a: /* cvtpi2ps */
3955 case 0x12a: /* cvtpi2pd */
3956 tcg_gen_helper_0_0(helper_enter_mmx);
3957 if (mod != 3) {
3958 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3959 op2_offset = offsetof(CPUX86State,mmx_t0);
3960 gen_ldq_env_A0(s->mem_index, op2_offset);
3961 } else {
3962 rm = (modrm & 7);
3963 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3964 }
3965 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3966 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3967 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3968 switch(b >> 8) {
3969 case 0x0:
3970 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3971 break;
3972 default:
3973 case 0x1:
3974 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3975 break;
3976 }
3977 break;
3978 case 0x22a: /* cvtsi2ss */
3979 case 0x32a: /* cvtsi2sd */
3980 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3981 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3982 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3983 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3984 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3985 if (ot == OT_LONG) {
3986 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3987 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3988 } else {
3989 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3990 }
3991 break;
3992 case 0x02c: /* cvttps2pi */
3993 case 0x12c: /* cvttpd2pi */
3994 case 0x02d: /* cvtps2pi */
3995 case 0x12d: /* cvtpd2pi */
3996 tcg_gen_helper_0_0(helper_enter_mmx);
3997 if (mod != 3) {
3998 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3999 op2_offset = offsetof(CPUX86State,xmm_t0);
4000 gen_ldo_env_A0(s->mem_index, op2_offset);
4001 } else {
4002 rm = (modrm & 7) | REX_B(s);
4003 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4004 }
4005 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
4006 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4007 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4008 switch(b) {
4009 case 0x02c:
4010 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
4011 break;
4012 case 0x12c:
4013 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
4014 break;
4015 case 0x02d:
4016 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
4017 break;
4018 case 0x12d:
4019 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4020 break;
4021 }
4022 break;
4023 case 0x22c: /* cvttss2si */
4024 case 0x32c: /* cvttsd2si */
4025 case 0x22d: /* cvtss2si */
4026 case 0x32d: /* cvtsd2si */
4027 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4028 if (mod != 3) {
4029 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4030 if ((b >> 8) & 1) {
4031 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4032 } else {
4033 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4034 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4035 }
4036 op2_offset = offsetof(CPUX86State,xmm_t0);
4037 } else {
4038 rm = (modrm & 7) | REX_B(s);
4039 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4040 }
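/* Table index into sse_op_table3 (an assumption about the table layout,
   which groups its helpers in blocks of four): +4 skips the cvtsi2ss/sd
   entries used above, (b & 1) * 4 selects the non-truncating cvt*2si
   block, (b >> 8) - 2 picks single (F3) vs. double (F2) precision, and
   (s->dflag == 2) * 2 the 64-bit destination variant. */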
4041 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4042 (b & 1) * 4];
4043 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4044 if (ot == OT_LONG) {
4045 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4046 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4047 } else {
4048 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4049 }
4050 gen_op_mov_reg_T0(ot, reg);
4051 break;
4052 case 0xc4: /* pinsrw */
4053 case 0x1c4:
4054 s->rip_offset = 1;
4055 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4056 val = ldub_code(s->pc++);
4057 if (b1) {
4058 val &= 7;
4059 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4060 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4061 } else {
4062 val &= 3;
4063 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4064 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4065 }
4066 break;
4067 case 0xc5: /* pextrw */
4068 case 0x1c5:
4069 if (mod != 3)
4070 goto illegal_op;
4071 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4072 val = ldub_code(s->pc++);
4073 if (b1) {
4074 val &= 7;
4075 rm = (modrm & 7) | REX_B(s);
4076 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4077 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4078 } else {
4079 val &= 3;
4080 rm = (modrm & 7);
4081 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4082 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4083 }
4084 reg = ((modrm >> 3) & 7) | rex_r;
4085 gen_op_mov_reg_T0(ot, reg);
4086 break;
4087 case 0x1d6: /* movq ea, xmm */
4088 if (mod != 3) {
4089 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4090 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4091 } else {
4092 rm = (modrm & 7) | REX_B(s);
4093 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4094 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4095 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4096 }
4097 break;
4098 case 0x2d6: /* movq2dq */
4099 tcg_gen_helper_0_0(helper_enter_mmx);
4100 rm = (modrm & 7);
4101 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4102 offsetof(CPUX86State,fpregs[rm].mmx));
4103 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4104 break;
4105 case 0x3d6: /* movdq2q */
4106 tcg_gen_helper_0_0(helper_enter_mmx);
4107 rm = (modrm & 7) | REX_B(s);
4108 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4109 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4110 break;
4111 case 0xd7: /* pmovmskb */
4112 case 0x1d7:
4113 if (mod != 3)
4114 goto illegal_op;
4115 if (b1) {
4116 rm = (modrm & 7) | REX_B(s);
4117 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4118 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4119 } else {
4120 rm = (modrm & 7);
4121 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4122 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4123 }
4124 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4125 reg = ((modrm >> 3) & 7) | rex_r;
4126 gen_op_mov_reg_T0(OT_LONG, reg);
4127 break;
4128 case 0x138:
4129 if (s->prefix & PREFIX_REPNZ)
4130 goto crc32;
4131 case 0x038:
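/* For the 0f 38 xx opcode map, the byte already sitting in 'modrm' is
   really the third opcode byte (the common SSE decode path above appears
   to prefetch it), so promote it to 'b' and read the real ModRM next. */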
4132 b = modrm;
4133 modrm = ldub_code(s->pc++);
4134 rm = modrm & 7;
4135 reg = ((modrm >> 3) & 7) | rex_r;
4136 mod = (modrm >> 6) & 3;
4137
4138 sse_op2 = sse_op_table6[b].op[b1];
4139 if (!sse_op2)
4140 goto illegal_op;
4141 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4142 goto illegal_op;
4143
4144 if (b1) {
4145 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4146 if (mod == 3) {
4147 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4148 } else {
4149 op2_offset = offsetof(CPUX86State,xmm_t0);
4150 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4151 switch (b) {
4152 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4153 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4154 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4155 gen_ldq_env_A0(s->mem_index, op2_offset +
4156 offsetof(XMMReg, XMM_Q(0)));
4157 break;
4158 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4159 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4160 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
4161 (s->mem_index >> 2) - 1);
4162 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4163 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4164 offsetof(XMMReg, XMM_L(0)));
4165 break;
4166 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4167 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4168 (s->mem_index >> 2) - 1);
4169 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4170 offsetof(XMMReg, XMM_W(0)));
4171 break;
4172 case 0x2a: /* movntdqa */
4173 gen_ldo_env_A0(s->mem_index, op1_offset);
4174 return;
4175 default:
4176 gen_ldo_env_A0(s->mem_index, op2_offset);
4177 }
4178 }
4179 } else {
4180 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4181 if (mod == 3) {
4182 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4183 } else {
4184 op2_offset = offsetof(CPUX86State,mmx_t0);
4185 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4186 gen_ldq_env_A0(s->mem_index, op2_offset);
4187 }
4188 }
4189 if (sse_op2 == SSE_SPECIAL)
4190 goto illegal_op;
4191
4192 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4193 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4194 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4195
4196 if (b == 0x17)
4197 s->cc_op = CC_OP_EFLAGS;
4198 break;
4199 case 0x338: /* crc32 */
4200 crc32:
4201 b = modrm;
4202 modrm = ldub_code(s->pc++);
4203 reg = ((modrm >> 3) & 7) | rex_r;
4204
4205 if (b != 0xf0 && b != 0xf1)
4206 goto illegal_op;
4207 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4208 goto illegal_op;
4209
4210 if (b == 0xf0)
4211 ot = OT_BYTE;
4212 else if (b == 0xf1 && s->dflag != 2)
4213 if (s->prefix & PREFIX_DATA)
4214 ot = OT_WORD;
4215 else
4216 ot = OT_LONG;
4217 else
4218 ot = OT_QUAD;
4219
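/* helper_crc32 presumably folds the low 8 << ot bits of the operand in
   T0 into the 32-bit CRC accumulator fetched from reg above. */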
4220 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4221 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4222 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4223 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4224 cpu_T[0], tcg_const_i32(8 << ot));
4225
4226 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4227 gen_op_mov_reg_T0(ot, reg);
4228 break;
4229 case 0x03a:
4230 case 0x13a:
4231 b = modrm;
4232 modrm = ldub_code(s->pc++);
4233 rm = modrm & 7;
4234 reg = ((modrm >> 3) & 7) | rex_r;
4235 mod = (modrm >> 6) & 3;
4236
4237 sse_op2 = sse_op_table7[b].op[b1];
4238 if (!sse_op2)
4239 goto illegal_op;
4240 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4241 goto illegal_op;
4242
4243 if (sse_op2 == SSE_SPECIAL) {
4244 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4245 rm = (modrm & 7) | REX_B(s);
4246 if (mod != 3)
4247 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4248 reg = ((modrm >> 3) & 7) | rex_r;
4249 val = ldub_code(s->pc++);
4250 switch (b) {
4251 case 0x14: /* pextrb */
4252 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4253 xmm_regs[reg].XMM_B(val & 15)));
4254 if (mod == 3)
4255 gen_op_mov_reg_T0(ot, rm);
4256 else
4257 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4258 (s->mem_index >> 2) - 1);
4259 break;
4260 case 0x15: /* pextrw */
4261 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4262 xmm_regs[reg].XMM_W(val & 7)));
4263 if (mod == 3)
4264 gen_op_mov_reg_T0(ot, rm);
4265 else
4266 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4267 (s->mem_index >> 2) - 1);
4268 break;
4269 case 0x16:
4270 if (ot == OT_LONG) { /* pextrd */
4271 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4272 offsetof(CPUX86State,
4273 xmm_regs[reg].XMM_L(val & 3)));
4274 if (mod == 3)
4275 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4276 else
4277 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4278 (s->mem_index >> 2) - 1);
4279 } else { /* pextrq */
4280 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4281 offsetof(CPUX86State,
4282 xmm_regs[reg].XMM_Q(val & 1)));
4283 if (mod == 3)
4284 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4285 else
4286 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4287 (s->mem_index >> 2) - 1);
4288 }
4289 break;
4290 case 0x17: /* extractps */
4291 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4292 xmm_regs[reg].XMM_L(val & 3)));
4293 if (mod == 3)
4294 gen_op_mov_reg_T0(ot, rm);
4295 else
4296 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4297 (s->mem_index >> 2) - 1);
4298 break;
4299 case 0x20: /* pinsrb */
4300 if (mod == 3)
4301 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4302 else
4303 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4304 (s->mem_index >> 2) - 1);
4305 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4306 xmm_regs[reg].XMM_B(val & 15)));
4307 break;
4308 case 0x21: /* insertps */
4309 if (mod == 3)
4310 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4311 offsetof(CPUX86State,xmm_regs[rm]
4312 .XMM_L((val >> 6) & 3)));
4313 else
4314 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4315 (s->mem_index >> 2) - 1);
4316 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4317 offsetof(CPUX86State,xmm_regs[reg]
4318 .XMM_L((val >> 4) & 3)));
4319 if ((val >> 0) & 1)
4320 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4321 cpu_env, offsetof(CPUX86State,
4322 xmm_regs[reg].XMM_L(0)));
4323 if ((val >> 1) & 1)
4324 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4325 cpu_env, offsetof(CPUX86State,
4326 xmm_regs[reg].XMM_L(1)));
4327 if ((val >> 2) & 1)
4328 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4329 cpu_env, offsetof(CPUX86State,
4330 xmm_regs[reg].XMM_L(2)));
4331 if ((val >> 3) & 1)
4332 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4333 cpu_env, offsetof(CPUX86State,
4334 xmm_regs[reg].XMM_L(3)));
4335 break;
4336 case 0x22:
4337 if (ot == OT_LONG) { /* pinsrd */
4338 if (mod == 3)
4339 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4340 else
4341 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4342 (s->mem_index >> 2) - 1);
4343 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4344 offsetof(CPUX86State,
4345 xmm_regs[reg].XMM_L(val & 3)));
4346 } else { /* pinsrq */
4347 if (mod == 3)
4348 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4349 else
4350 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4351 (s->mem_index >> 2) - 1);
4352 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4353 offsetof(CPUX86State,
4354 xmm_regs[reg].XMM_Q(val & 1)));
4355 }
4356 break;
4357 }
4358 return;
4359 }
4360
4361 if (b1) {
4362 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4363 if (mod == 3) {
4364 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4365 } else {
4366 op2_offset = offsetof(CPUX86State,xmm_t0);
4367 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4368 gen_ldo_env_A0(s->mem_index, op2_offset);
4369 }
4370 } else {
4371 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4372 if (mod == 3) {
4373 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4374 } else {
4375 op2_offset = offsetof(CPUX86State,mmx_t0);
4376 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4377 gen_ldq_env_A0(s->mem_index, op2_offset);
4378 }
4379 }
4380 val = ldub_code(s->pc++);
4381
4382 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4383 s->cc_op = CC_OP_EFLAGS;
4384
4385 if (s->dflag == 2)
4386 /* The helper must use the entire 64-bit gp registers */
4387 val |= 1 << 8;
4388 }
4389
4390 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4391 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4392 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4393 break;
4394 default:
4395 goto illegal_op;
4396 }
4397 } else {
4398 /* generic MMX or SSE operation */
4399 switch(b) {
4400 case 0x70: /* pshufx insn */
4401 case 0xc6: /* shufps/shufpd insn */
4402 case 0xc2: /* compare insns */
4403 s->rip_offset = 1;
4404 break;
4405 default:
4406 break;
4407 }
4408 if (is_xmm) {
4409 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4410 if (mod != 3) {
4411 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4412 op2_offset = offsetof(CPUX86State,xmm_t0);
4413 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4414 b == 0xc2)) {
4415 /* special case for scalar SSE instructions: only the low element is loaded */
4416 if (b1 == 2) {
4417 /* 32 bit access */
4418 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4419 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4420 } else {
4421 /* 64 bit access */
4422 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4423 }
4424 } else {
4425 gen_ldo_env_A0(s->mem_index, op2_offset);
4426 }
4427 } else {
4428 rm = (modrm & 7) | REX_B(s);
4429 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4430 }
4431 } else {
4432 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4433 if (mod != 3) {
4434 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4435 op2_offset = offsetof(CPUX86State,mmx_t0);
4436 gen_ldq_env_A0(s->mem_index, op2_offset);
4437 } else {
4438 rm = (modrm & 7);
4439 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4440 }
4441 }
4442 switch(b) {
4443 case 0x0f: /* 3DNow! data insns */
4444 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4445 goto illegal_op;
4446 val = ldub_code(s->pc++);
4447 sse_op2 = sse_op_table5[val];
4448 if (!sse_op2)
4449 goto illegal_op;
4450 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4451 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4452 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4453 break;
4454 case 0x70: /* pshufx insn */
4455 case 0xc6: /* shufps/shufpd insn */
4456 val = ldub_code(s->pc++);
4457 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4458 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4459 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4460 break;
4461 case 0xc2:
4462 /* compare insns */
4463 val = ldub_code(s->pc++);
4464 if (val >= 8)
4465 goto illegal_op;
4466 sse_op2 = sse_op_table4[val][b1];
4467 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4468 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4469 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4470 break;
4471 case 0xf7:
4472 /* maskmov: we must prepare A0, the implicit rDI destination */
4473 if (mod != 3)
4474 goto illegal_op;
4475#ifdef TARGET_X86_64
4476 if (s->aflag == 2) {
4477 gen_op_movq_A0_reg(R_EDI);
4478 } else
4479#endif
4480 {
4481 gen_op_movl_A0_reg(R_EDI);
4482 if (s->aflag == 0)
4483 gen_op_andl_A0_ffff();
4484 }
4485 gen_add_A0_ds_seg(s);
4486
4487 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4488 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4489 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4490 break;
4491 default:
4492 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4493 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4494 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4495 break;
4496 }
4497 if (b == 0x2e || b == 0x2f) {
4498 s->cc_op = CC_OP_EFLAGS;
4499 }
4500 }
4501}
4502
4503#ifdef VBOX
4504/* Checks if it's an invalid lock sequence. Only a few instructions
4505 can be used together with the lock prefix, and of those only the
4506 forms that write a memory operand. So, this is kind of annoying
4507 work to do...
4508 The AMD manual lists the following instructions.
4509 ADC
4510 ADD
4511 AND
4512 BTC
4513 BTR
4514 BTS
4515 CMPXCHG
4516 CMPXCHG8B
4517 CMPXCHG16B
4518 DEC
4519 INC
4520 NEG
4521 NOT
4522 OR
4523 SBB
4524 SUB
4525 XADD
4526 XCHG
4527 XOR */
4528static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4529{
4530 target_ulong pc = s->pc;
4531 int modrm, mod, op;
4532
4533 /* X={8,16,32,64} Y={16,32,64} */
4534 switch (b)
4535 {
4536 /* /2: ADC reg/memX, immX */
4537 /* /0: ADD reg/memX, immX */
4538 /* /4: AND reg/memX, immX */
4539 /* /1: OR reg/memX, immX */
4540 /* /3: SBB reg/memX, immX */
4541 /* /5: SUB reg/memX, immX */
4542 /* /6: XOR reg/memX, immX */
4543 case 0x80:
4544 case 0x81:
4545 case 0x83:
4546 modrm = ldub_code(pc++);
4547 op = (modrm >> 3) & 7;
4548 if (op == 7) /* /7: CMP */
4549 break;
4550 mod = (modrm >> 6) & 3;
4551 if (mod == 3) /* register destination */
4552 break;
4553 return false;
4554
4555 case 0x10: /* /r: ADC reg/mem8, reg8 */
4556 case 0x11: /* /r: ADC reg/memX, regY */
4557 case 0x00: /* /r: ADD reg/mem8, reg8 */
4558 case 0x01: /* /r: ADD reg/memX, regY */
4559 case 0x20: /* /r: AND reg/mem8, reg8 */
4560 case 0x21: /* /r: AND reg/memY, regY */
4561 case 0x08: /* /r: OR reg/mem8, reg8 */
4562 case 0x09: /* /r: OR reg/memY, regY */
4563 case 0x18: /* /r: SBB reg/mem8, reg8 */
4564 case 0x19: /* /r: SBB reg/memY, regY */
4565 case 0x28: /* /r: SUB reg/mem8, reg8 */
4566 case 0x29: /* /r: SUB reg/memY, regY */
4567 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4568 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4569 case 0x30: /* /r: XOR reg/mem8, reg8 */
4570 case 0x31: /* /r: XOR reg/memY, regY */
4571 modrm = ldub_code(pc++);
4572 mod = (modrm >> 6) & 3;
4573 if (mod == 3) /* register destination */
4574 break;
4575 return false;
4576
4577 /* /1: DEC reg/memX */
4578 /* /0: INC reg/memX */
4579 case 0xfe:
4580 case 0xff:
4581 modrm = ldub_code(pc++);
4582 mod = (modrm >> 6) & 3;
4583 if (mod == 3) /* register destination */
4584 break;
4585 return false;
4586
4587 /* /3: NEG reg/memX */
4588 /* /2: NOT reg/memX */
4589 case 0xf6:
4590 case 0xf7:
4591 modrm = ldub_code(pc++);
4592 mod = (modrm >> 6) & 3;
4593 if (mod == 3) /* register destination */
4594 break;
4595 return false;
4596
4597 case 0x0f:
4598 b = ldub_code(pc++);
4599 switch (b)
4600 {
4601 /* /7: BTC reg/memY, imm8 */
4602 /* /6: BTR reg/memY, imm8 */
4603 /* /5: BTS reg/memY, imm8 */
4604 case 0xba:
4605 modrm = ldub_code(pc++);
4606 op = (modrm >> 3) & 7;
4607 if (op < 5)
4608 break;
4609 mod = (modrm >> 6) & 3;
4610 if (mod == 3) /* register destination */
4611 break;
4612 return false;
4613
4614 case 0xbb: /* /r: BTC reg/memY, regY */
4615 case 0xb3: /* /r: BTR reg/memY, regY */
4616 case 0xab: /* /r: BTS reg/memY, regY */
4617 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4618 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4619 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4620 case 0xc1: /* /r: XADD reg/memY, regY */
4621 modrm = ldub_code(pc++);
4622 mod = (modrm >> 6) & 3;
4623 if (mod == 3) /* register destination */
4624 break;
4625 return false;
4626
4627 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4628 case 0xc7:
4629 modrm = ldub_code(pc++);
4630 op = (modrm >> 3) & 7;
4631 if (op != 1)
4632 break;
4633 return false;
4634 }
4635 break;
4636 }
4637
4638 /* illegal sequence. The s->pc is past the lock prefix and that
4639 is sufficient for the TB, I think. */
4640 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4641 return true;
4642}
4643#endif /* VBOX */
4644
4645
4646/* convert one instruction. s->is_jmp is set if the translation must
4647 be stopped. Return the next pc value */
4648static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4649{
4650 int b, prefixes, aflag, dflag;
4651 int shift, ot;
4652 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4653 target_ulong next_eip, tval;
4654 int rex_w, rex_r;
4655
4656 if (unlikely(loglevel & CPU_LOG_TB_OP))
4657 tcg_gen_debug_insn_start(pc_start);
4658
4659 s->pc = pc_start;
4660 prefixes = 0;
4661 aflag = s->code32;
4662 dflag = s->code32;
4663 s->override = -1;
4664 rex_w = -1;
4665 rex_r = 0;
4666#ifdef TARGET_X86_64
4667 s->rex_x = 0;
4668 s->rex_b = 0;
4669 x86_64_hregs = 0;
4670#endif
4671 s->rip_offset = 0; /* for relative ip address */
4672#ifdef VBOX
4673 /* nike: seems to only slow things down */
4674# if 0
4675 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4676
4677 gen_update_eip(pc_start - s->cs_base);
4678# endif
4679#endif
4680
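/* Prefix scan: each recognized prefix byte records its effect and jumps
   back here for the next byte, until the first non-prefix byte, the
   actual opcode, is reached. */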
4681 next_byte:
4682 b = ldub_code(s->pc);
4683 s->pc++;
4684 /* check prefixes */
4685#ifdef TARGET_X86_64
4686 if (CODE64(s)) {
4687 switch (b) {
4688 case 0xf3:
4689 prefixes |= PREFIX_REPZ;
4690 goto next_byte;
4691 case 0xf2:
4692 prefixes |= PREFIX_REPNZ;
4693 goto next_byte;
4694 case 0xf0:
4695 prefixes |= PREFIX_LOCK;
4696 goto next_byte;
4697 case 0x2e:
4698 s->override = R_CS;
4699 goto next_byte;
4700 case 0x36:
4701 s->override = R_SS;
4702 goto next_byte;
4703 case 0x3e:
4704 s->override = R_DS;
4705 goto next_byte;
4706 case 0x26:
4707 s->override = R_ES;
4708 goto next_byte;
4709 case 0x64:
4710 s->override = R_FS;
4711 goto next_byte;
4712 case 0x65:
4713 s->override = R_GS;
4714 goto next_byte;
4715 case 0x66:
4716 prefixes |= PREFIX_DATA;
4717 goto next_byte;
4718 case 0x67:
4719 prefixes |= PREFIX_ADR;
4720 goto next_byte;
4721 case 0x40 ... 0x4f:
4722 /* REX prefix */
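/* REX is 0100WRXB: W selects 64-bit operand size; R, X and B extend the
   ModRM reg, SIB index and rm/base fields. R, X and B are shifted up to
   bit 3 so they can later be OR'ed straight into the 3-bit ModRM/SIB
   register numbers. */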
4723 rex_w = (b >> 3) & 1;
4724 rex_r = (b & 0x4) << 1;
4725 s->rex_x = (b & 0x2) << 2;
4726 REX_B(s) = (b & 0x1) << 3;
4727 x86_64_hregs = 1; /* select uniform byte register addressing */
4728 goto next_byte;
4729 }
4730 if (rex_w == 1) {
4731 /* 0x66 is ignored if rex.w is set */
4732 dflag = 2;
4733 } else {
4734 if (prefixes & PREFIX_DATA)
4735 dflag ^= 1;
4736 }
4737 if (!(prefixes & PREFIX_ADR))
4738 aflag = 2;
4739 } else
4740#endif
4741 {
4742 switch (b) {
4743 case 0xf3:
4744 prefixes |= PREFIX_REPZ;
4745 goto next_byte;
4746 case 0xf2:
4747 prefixes |= PREFIX_REPNZ;
4748 goto next_byte;
4749 case 0xf0:
4750 prefixes |= PREFIX_LOCK;
4751 goto next_byte;
4752 case 0x2e:
4753 s->override = R_CS;
4754 goto next_byte;
4755 case 0x36:
4756 s->override = R_SS;
4757 goto next_byte;
4758 case 0x3e:
4759 s->override = R_DS;
4760 goto next_byte;
4761 case 0x26:
4762 s->override = R_ES;
4763 goto next_byte;
4764 case 0x64:
4765 s->override = R_FS;
4766 goto next_byte;
4767 case 0x65:
4768 s->override = R_GS;
4769 goto next_byte;
4770 case 0x66:
4771 prefixes |= PREFIX_DATA;
4772 goto next_byte;
4773 case 0x67:
4774 prefixes |= PREFIX_ADR;
4775 goto next_byte;
4776 }
4777 if (prefixes & PREFIX_DATA)
4778 dflag ^= 1;
4779 if (prefixes & PREFIX_ADR)
4780 aflag ^= 1;
4781 }
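/* From here on aflag/dflag hold the effective address/operand size:
   0 = 16-bit, 1 = 32-bit, 2 = 64-bit. */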
4782
4783 s->prefix = prefixes;
4784 s->aflag = aflag;
4785 s->dflag = dflag;
4786
4787 /* lock generation */
4788#ifndef VBOX
4789 if (prefixes & PREFIX_LOCK)
4790 tcg_gen_helper_0_0(helper_lock);
4791#else /* VBOX */
4792 if (prefixes & PREFIX_LOCK) {
4793 if (is_invalid_lock_sequence(s, pc_start, b)) {
4794 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4795 return s->pc;
4796 }
4797 tcg_gen_helper_0_0(helper_lock);
4798 }
4799#endif /* VBOX */
4800
4801 /* now check op code */
4802 reswitch:
4803 switch(b) {
4804 case 0x0f:
4805 /**************************/
4806 /* extended op code */
4807 b = ldub_code(s->pc++) | 0x100;
4808 goto reswitch;
4809
4810 /**************************/
4811 /* arith & logic */
4812 case 0x00 ... 0x05:
4813 case 0x08 ... 0x0d:
4814 case 0x10 ... 0x15:
4815 case 0x18 ... 0x1d:
4816 case 0x20 ... 0x25:
4817 case 0x28 ... 0x2d:
4818 case 0x30 ... 0x35:
4819 case 0x38 ... 0x3d:
4820 {
4821 int op, f, val;
4822 op = (b >> 3) & 7;
4823 f = (b >> 1) & 3;
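/* bits 5:3 of the opcode pick the ALU operation (ADD..CMP); f picks the
   encoding form: 0 = Ev,Gv, 1 = Gv,Ev, 2 = AL/eAX,immediate. */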
4824
4825 if ((b & 1) == 0)
4826 ot = OT_BYTE;
4827 else
4828 ot = dflag + OT_WORD;
4829
4830 switch(f) {
4831 case 0: /* OP Ev, Gv */
4832 modrm = ldub_code(s->pc++);
4833 reg = ((modrm >> 3) & 7) | rex_r;
4834 mod = (modrm >> 6) & 3;
4835 rm = (modrm & 7) | REX_B(s);
4836 if (mod != 3) {
4837 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4838 opreg = OR_TMP0;
4839 } else if (op == OP_XORL && rm == reg) {
4840 xor_zero:
4841 /* xor reg, reg optimisation */
4842 gen_op_movl_T0_0();
4843 s->cc_op = CC_OP_LOGICB + ot;
4844 gen_op_mov_reg_T0(ot, reg);
4845 gen_op_update1_cc();
4846 break;
4847 } else {
4848 opreg = rm;
4849 }
4850 gen_op_mov_TN_reg(ot, 1, reg);
4851 gen_op(s, op, ot, opreg);
4852 break;
4853 case 1: /* OP Gv, Ev */
4854 modrm = ldub_code(s->pc++);
4855 mod = (modrm >> 6) & 3;
4856 reg = ((modrm >> 3) & 7) | rex_r;
4857 rm = (modrm & 7) | REX_B(s);
4858 if (mod != 3) {
4859 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4860 gen_op_ld_T1_A0(ot + s->mem_index);
4861 } else if (op == OP_XORL && rm == reg) {
4862 goto xor_zero;
4863 } else {
4864 gen_op_mov_TN_reg(ot, 1, rm);
4865 }
4866 gen_op(s, op, ot, reg);
4867 break;
4868 case 2: /* OP A, Iv */
4869 val = insn_get(s, ot);
4870 gen_op_movl_T1_im(val);
4871 gen_op(s, op, ot, OR_EAX);
4872 break;
4873 }
4874 }
4875 break;
4876
4877 case 0x82:
4878 if (CODE64(s))
4879 goto illegal_op;
4880 case 0x80: /* GRP1 */
4881 case 0x81:
4882 case 0x83:
4883 {
4884 int val;
4885
4886 if ((b & 1) == 0)
4887 ot = OT_BYTE;
4888 else
4889 ot = dflag + OT_WORD;
4890
4891 modrm = ldub_code(s->pc++);
4892 mod = (modrm >> 6) & 3;
4893 rm = (modrm & 7) | REX_B(s);
4894 op = (modrm >> 3) & 7;
4895
4896 if (mod != 3) {
4897 if (b == 0x83)
4898 s->rip_offset = 1;
4899 else
4900 s->rip_offset = insn_const_size(ot);
4901 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4902 opreg = OR_TMP0;
4903 } else {
4904 opreg = rm;
4905 }
4906
4907 switch(b) {
4908 default:
4909 case 0x80:
4910 case 0x81:
4911 case 0x82:
4912 val = insn_get(s, ot);
4913 break;
4914 case 0x83:
4915 val = (int8_t)insn_get(s, OT_BYTE);
4916 break;
4917 }
4918 gen_op_movl_T1_im(val);
4919 gen_op(s, op, ot, opreg);
4920 }
4921 break;
4922
4923 /**************************/
4924 /* inc, dec, and other misc arith */
4925 case 0x40 ... 0x47: /* inc Gv */
4926 ot = dflag ? OT_LONG : OT_WORD;
4927 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4928 break;
4929 case 0x48 ... 0x4f: /* dec Gv */
4930 ot = dflag ? OT_LONG : OT_WORD;
4931 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4932 break;
4933 case 0xf6: /* GRP3 */
4934 case 0xf7:
4935 if ((b & 1) == 0)
4936 ot = OT_BYTE;
4937 else
4938 ot = dflag + OT_WORD;
4939
4940 modrm = ldub_code(s->pc++);
4941 mod = (modrm >> 6) & 3;
4942 rm = (modrm & 7) | REX_B(s);
4943 op = (modrm >> 3) & 7;
4944 if (mod != 3) {
4945 if (op == 0)
4946 s->rip_offset = insn_const_size(ot);
4947 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4948 gen_op_ld_T0_A0(ot + s->mem_index);
4949 } else {
4950 gen_op_mov_TN_reg(ot, 0, rm);
4951 }
4952
4953 switch(op) {
4954 case 0: /* test */
4955 val = insn_get(s, ot);
4956 gen_op_movl_T1_im(val);
4957 gen_op_testl_T0_T1_cc();
4958 s->cc_op = CC_OP_LOGICB + ot;
4959 break;
4960 case 2: /* not */
4961 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4962 if (mod != 3) {
4963 gen_op_st_T0_A0(ot + s->mem_index);
4964 } else {
4965 gen_op_mov_reg_T0(ot, rm);
4966 }
4967 break;
4968 case 3: /* neg */
4969 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4970 if (mod != 3) {
4971 gen_op_st_T0_A0(ot + s->mem_index);
4972 } else {
4973 gen_op_mov_reg_T0(ot, rm);
4974 }
4975 gen_op_update_neg_cc();
4976 s->cc_op = CC_OP_SUBB + ot;
4977 break;
4978 case 4: /* mul */
4979 switch(ot) {
4980 case OT_BYTE:
4981 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4982 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4983 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4984 /* XXX: use 32 bit mul which could be faster */
4985 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4986 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4987 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4988 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4989 s->cc_op = CC_OP_MULB;
4990 break;
4991 case OT_WORD:
4992 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4993 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4994 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4995 /* XXX: use 32 bit mul which could be faster */
4996 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4997 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4998 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4999 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5000 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5001 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5002 s->cc_op = CC_OP_MULW;
5003 break;
5004 default:
5005 case OT_LONG:
5006#ifdef TARGET_X86_64
5007 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5008 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
5009 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
5010 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5011 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5012 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5013 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5014 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5015 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5016#else
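/* 32-bit target: widen both factors to 64 bits so the low half of the
   product can go to EAX and the high half to EDX. */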
5017 {
5018 TCGv t0, t1;
5019 t0 = tcg_temp_new(TCG_TYPE_I64);
5020 t1 = tcg_temp_new(TCG_TYPE_I64);
5021 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5022 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5023 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5024 tcg_gen_mul_i64(t0, t0, t1);
5025 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5026 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5027 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5028 tcg_gen_shri_i64(t0, t0, 32);
5029 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5030 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5031 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5032 }
5033#endif
5034 s->cc_op = CC_OP_MULL;
5035 break;
5036#ifdef TARGET_X86_64
5037 case OT_QUAD:
5038 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5039 s->cc_op = CC_OP_MULQ;
5040 break;
5041#endif
5042 }
5043 break;
5044 case 5: /* imul */
5045 switch(ot) {
5046 case OT_BYTE:
5047 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5048 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5049 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5050 /* XXX: use 32 bit mul which could be faster */
5051 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5052 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5053 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5054 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5055 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5056 s->cc_op = CC_OP_MULB;
5057 break;
5058 case OT_WORD:
5059 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5060 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5061 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5062 /* XXX: use 32 bit mul which could be faster */
5063 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5064 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5065 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5066 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5067 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5068 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5069 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5070 s->cc_op = CC_OP_MULW;
5071 break;
5072 default:
5073 case OT_LONG:
5074#ifdef TARGET_X86_64
5075 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5076 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5077 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5078 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5079 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5080 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5081 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5082 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5083 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5084 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5085#else
5086 {
5087 TCGv t0, t1;
5088 t0 = tcg_temp_new(TCG_TYPE_I64);
5089 t1 = tcg_temp_new(TCG_TYPE_I64);
5090 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5091 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5092 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5093 tcg_gen_mul_i64(t0, t0, t1);
5094 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5095 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5096 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5097 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5098 tcg_gen_shri_i64(t0, t0, 32);
5099 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5100 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5101 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5102 }
5103#endif
5104 s->cc_op = CC_OP_MULL;
5105 break;
5106#ifdef TARGET_X86_64
5107 case OT_QUAD:
5108 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5109 s->cc_op = CC_OP_MULQ;
5110 break;
5111#endif
5112 }
5113 break;
5114 case 6: /* div */
5115 switch(ot) {
5116 case OT_BYTE:
5117 gen_jmp_im(pc_start - s->cs_base);
5118 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5119 break;
5120 case OT_WORD:
5121 gen_jmp_im(pc_start - s->cs_base);
5122 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5123 break;
5124 default:
5125 case OT_LONG:
5126 gen_jmp_im(pc_start - s->cs_base);
5127 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5128 break;
5129#ifdef TARGET_X86_64
5130 case OT_QUAD:
5131 gen_jmp_im(pc_start - s->cs_base);
5132 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5133 break;
5134#endif
5135 }
5136 break;
5137 case 7: /* idiv */
5138 switch(ot) {
5139 case OT_BYTE:
5140 gen_jmp_im(pc_start - s->cs_base);
5141 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5142 break;
5143 case OT_WORD:
5144 gen_jmp_im(pc_start - s->cs_base);
5145 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5146 break;
5147 default:
5148 case OT_LONG:
5149 gen_jmp_im(pc_start - s->cs_base);
5150 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5151 break;
5152#ifdef TARGET_X86_64
5153 case OT_QUAD:
5154 gen_jmp_im(pc_start - s->cs_base);
5155 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5156 break;
5157#endif
5158 }
5159 break;
5160 default:
5161 goto illegal_op;
5162 }
5163 break;
5164
5165 case 0xfe: /* GRP4 */
5166 case 0xff: /* GRP5 */
5167 if ((b & 1) == 0)
5168 ot = OT_BYTE;
5169 else
5170 ot = dflag + OT_WORD;
5171
5172 modrm = ldub_code(s->pc++);
5173 mod = (modrm >> 6) & 3;
5174 rm = (modrm & 7) | REX_B(s);
5175 op = (modrm >> 3) & 7;
5176 if (op >= 2 && b == 0xfe) {
5177 goto illegal_op;
5178 }
5179 if (CODE64(s)) {
5180 if (op == 2 || op == 4) {
5181 /* operand size for near calls and jumps is 64 bit */
5182 ot = OT_QUAD;
5183 } else if (op == 3 || op == 5) {
5184 /* for far calls and jumps, the operand is 16 or 32 bit,
5185 even in long mode */
5186 ot = dflag ? OT_LONG : OT_WORD;
5187 } else if (op == 6) {
5188 /* default push size is 64 bit */
5189 ot = dflag ? OT_QUAD : OT_WORD;
5190 }
5191 }
5192 if (mod != 3) {
5193 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5194 if (op >= 2 && op != 3 && op != 5)
5195 gen_op_ld_T0_A0(ot + s->mem_index);
5196 } else {
5197 gen_op_mov_TN_reg(ot, 0, rm);
5198 }
5199
5200 switch(op) {
5201 case 0: /* inc Ev */
5202 if (mod != 3)
5203 opreg = OR_TMP0;
5204 else
5205 opreg = rm;
5206 gen_inc(s, ot, opreg, 1);
5207 break;
5208 case 1: /* dec Ev */
5209 if (mod != 3)
5210 opreg = OR_TMP0;
5211 else
5212 opreg = rm;
5213 gen_inc(s, ot, opreg, -1);
5214 break;
5215 case 2: /* call Ev */
5216 /* XXX: optimize if memory (no 'and' is necessary) */
5217#ifdef VBOX_WITH_CALL_RECORD
5218 if (s->record_call)
5219 gen_op_record_call();
5220#endif
5221 if (s->dflag == 0)
5222 gen_op_andl_T0_ffff();
5223 next_eip = s->pc - s->cs_base;
5224 gen_movtl_T1_im(next_eip);
5225 gen_push_T1(s);
5226 gen_op_jmp_T0();
5227 gen_eob(s);
5228 break;
5229 case 3: /* lcall Ev */
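/* fetch the new offset into T1, then the selector word stored just
   above it into T0 */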
5230 gen_op_ld_T1_A0(ot + s->mem_index);
5231 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5232 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5233 do_lcall:
5234 if (s->pe && !s->vm86) {
5235 if (s->cc_op != CC_OP_DYNAMIC)
5236 gen_op_set_cc_op(s->cc_op);
5237 gen_jmp_im(pc_start - s->cs_base);
5238 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5239 tcg_gen_helper_0_4(helper_lcall_protected,
5240 cpu_tmp2_i32, cpu_T[1],
5241 tcg_const_i32(dflag),
5242 tcg_const_i32(s->pc - pc_start));
5243 } else {
5244 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5245 tcg_gen_helper_0_4(helper_lcall_real,
5246 cpu_tmp2_i32, cpu_T[1],
5247 tcg_const_i32(dflag),
5248 tcg_const_i32(s->pc - s->cs_base));
5249 }
5250 gen_eob(s);
5251 break;
5252 case 4: /* jmp Ev */
5253 if (s->dflag == 0)
5254 gen_op_andl_T0_ffff();
5255 gen_op_jmp_T0();
5256 gen_eob(s);
5257 break;
5258 case 5: /* ljmp Ev */
5259 gen_op_ld_T1_A0(ot + s->mem_index);
5260 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5261 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5262 do_ljmp:
5263 if (s->pe && !s->vm86) {
5264 if (s->cc_op != CC_OP_DYNAMIC)
5265 gen_op_set_cc_op(s->cc_op);
5266 gen_jmp_im(pc_start - s->cs_base);
5267 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5268 tcg_gen_helper_0_3(helper_ljmp_protected,
5269 cpu_tmp2_i32,
5270 cpu_T[1],
5271 tcg_const_i32(s->pc - pc_start));
5272 } else {
5273 gen_op_movl_seg_T0_vm(R_CS);
5274 gen_op_movl_T0_T1();
5275 gen_op_jmp_T0();
5276 }
5277 gen_eob(s);
5278 break;
5279 case 6: /* push Ev */
5280 gen_push_T0(s);
5281 break;
5282 default:
5283 goto illegal_op;
5284 }
5285 break;
5286
5287 case 0x84: /* test Ev, Gv */
5288 case 0x85:
5289 if ((b & 1) == 0)
5290 ot = OT_BYTE;
5291 else
5292 ot = dflag + OT_WORD;
5293
5294 modrm = ldub_code(s->pc++);
5295 mod = (modrm >> 6) & 3;
5296 rm = (modrm & 7) | REX_B(s);
5297 reg = ((modrm >> 3) & 7) | rex_r;
5298
5299 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5300 gen_op_mov_TN_reg(ot, 1, reg);
5301 gen_op_testl_T0_T1_cc();
5302 s->cc_op = CC_OP_LOGICB + ot;
5303 break;
5304
5305 case 0xa8: /* test eAX, Iv */
5306 case 0xa9:
5307 if ((b & 1) == 0)
5308 ot = OT_BYTE;
5309 else
5310 ot = dflag + OT_WORD;
5311 val = insn_get(s, ot);
5312
5313 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5314 gen_op_movl_T1_im(val);
5315 gen_op_testl_T0_T1_cc();
5316 s->cc_op = CC_OP_LOGICB + ot;
5317 break;
5318
5319 case 0x98: /* CWDE/CBW */
5320#ifdef TARGET_X86_64
5321 if (dflag == 2) {
5322 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5323 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5324 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5325 } else
5326#endif
5327 if (dflag == 1) {
5328 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5329 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5330 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5331 } else {
5332 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5333 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5334 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5335 }
5336 break;
5337 case 0x99: /* CDQ/CWD */
5338#ifdef TARGET_X86_64
5339 if (dflag == 2) {
5340 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5341 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5342 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5343 } else
5344#endif
5345 if (dflag == 1) {
5346 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5347 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5348 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5349 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5350 } else {
5351 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5352 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5353 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5354 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5355 }
5356 break;
5357 case 0x1af: /* imul Gv, Ev */
5358 case 0x69: /* imul Gv, Ev, I */
5359 case 0x6b:
5360 ot = dflag + OT_WORD;
5361 modrm = ldub_code(s->pc++);
5362 reg = ((modrm >> 3) & 7) | rex_r;
5363 if (b == 0x69)
5364 s->rip_offset = insn_const_size(ot);
5365 else if (b == 0x6b)
5366 s->rip_offset = 1;
5367 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5368 if (b == 0x69) {
5369 val = insn_get(s, ot);
5370 gen_op_movl_T1_im(val);
5371 } else if (b == 0x6b) {
5372 val = (int8_t)insn_get(s, OT_BYTE);
5373 gen_op_movl_T1_im(val);
5374 } else {
5375 gen_op_mov_TN_reg(ot, 1, reg);
5376 }
5377
5378#ifdef TARGET_X86_64
5379 if (ot == OT_QUAD) {
5380 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5381 } else
5382#endif
5383 if (ot == OT_LONG) {
5384#ifdef TARGET_X86_64
5385 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5386 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5387 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5388 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5389 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5390 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5391#else
5392 {
5393 TCGv t0, t1;
5394 t0 = tcg_temp_new(TCG_TYPE_I64);
5395 t1 = tcg_temp_new(TCG_TYPE_I64);
5396 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5397 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5398 tcg_gen_mul_i64(t0, t0, t1);
5399 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5400 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5401 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5402 tcg_gen_shri_i64(t0, t0, 32);
5403 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5404 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5405 }
5406#endif
5407 } else {
5408 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5409 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5410 /* XXX: use 32 bit mul which could be faster */
5411 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5412 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5413 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5414 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5415 }
5416 gen_op_mov_reg_T0(ot, reg);
5417 s->cc_op = CC_OP_MULB + ot;
5418 break;
5419 case 0x1c0:
5420 case 0x1c1: /* xadd Ev, Gv */
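/* exchange and add: the destination receives src + dest while the
   source register receives the old destination value. */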
5421 if ((b & 1) == 0)
5422 ot = OT_BYTE;
5423 else
5424 ot = dflag + OT_WORD;
5425 modrm = ldub_code(s->pc++);
5426 reg = ((modrm >> 3) & 7) | rex_r;
5427 mod = (modrm >> 6) & 3;
5428 if (mod == 3) {
5429 rm = (modrm & 7) | REX_B(s);
5430 gen_op_mov_TN_reg(ot, 0, reg);
5431 gen_op_mov_TN_reg(ot, 1, rm);
5432 gen_op_addl_T0_T1();
5433 gen_op_mov_reg_T1(ot, reg);
5434 gen_op_mov_reg_T0(ot, rm);
5435 } else {
5436 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5437 gen_op_mov_TN_reg(ot, 0, reg);
5438 gen_op_ld_T1_A0(ot + s->mem_index);
5439 gen_op_addl_T0_T1();
5440 gen_op_st_T0_A0(ot + s->mem_index);
5441 gen_op_mov_reg_T1(ot, reg);
5442 }
5443 gen_op_update2_cc();
5444 s->cc_op = CC_OP_ADDB + ot;
5445 break;
5446 case 0x1b0:
5447 case 0x1b1: /* cmpxchg Ev, Gv */
5448 {
5449 int label1, label2;
5450 TCGv t0, t1, t2, a0;
5451
5452 if ((b & 1) == 0)
5453 ot = OT_BYTE;
5454 else
5455 ot = dflag + OT_WORD;
5456 modrm = ldub_code(s->pc++);
5457 reg = ((modrm >> 3) & 7) | rex_r;
5458 mod = (modrm >> 6) & 3;
5459 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5460 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5461 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5462 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5463 gen_op_mov_v_reg(ot, t1, reg);
5464 if (mod == 3) {
5465 rm = (modrm & 7) | REX_B(s);
5466 gen_op_mov_v_reg(ot, t0, rm);
5467 } else {
5468 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5469 tcg_gen_mov_tl(a0, cpu_A0);
5470 gen_op_ld_v(ot + s->mem_index, t0, a0);
5471 rm = 0; /* avoid warning */
5472 }
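/* t2 = EAX - old dest; on equality (branch to label1) the source in t1
   is stored, otherwise EAX receives the old value. The memory form
   stores on both paths, like the unconditional write the real
   instruction performs. */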
5473 label1 = gen_new_label();
5474 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5475 tcg_gen_sub_tl(t2, t2, t0);
5476 gen_extu(ot, t2);
5477 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5478 if (mod == 3) {
5479 label2 = gen_new_label();
5480 gen_op_mov_reg_v(ot, R_EAX, t0);
5481 tcg_gen_br(label2);
5482 gen_set_label(label1);
5483 gen_op_mov_reg_v(ot, rm, t1);
5484 gen_set_label(label2);
5485 } else {
5486 tcg_gen_mov_tl(t1, t0);
5487 gen_op_mov_reg_v(ot, R_EAX, t0);
5488 gen_set_label(label1);
5489 /* always store */
5490 gen_op_st_v(ot + s->mem_index, t1, a0);
5491 }
5492 tcg_gen_mov_tl(cpu_cc_src, t0);
5493 tcg_gen_mov_tl(cpu_cc_dst, t2);
5494 s->cc_op = CC_OP_SUBB + ot;
5495 tcg_temp_free(t0);
5496 tcg_temp_free(t1);
5497 tcg_temp_free(t2);
5498 tcg_temp_free(a0);
5499 }
5500 break;
5501 case 0x1c7: /* cmpxchg8b */
5502 modrm = ldub_code(s->pc++);
5503 mod = (modrm >> 6) & 3;
5504 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5505 goto illegal_op;
5506#ifdef TARGET_X86_64
5507 if (dflag == 2) {
5508 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5509 goto illegal_op;
5510 gen_jmp_im(pc_start - s->cs_base);
5511 if (s->cc_op != CC_OP_DYNAMIC)
5512 gen_op_set_cc_op(s->cc_op);
5513 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5514 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5515 } else
5516#endif
5517 {
5518 if (!(s->cpuid_features & CPUID_CX8))
5519 goto illegal_op;
5520 gen_jmp_im(pc_start - s->cs_base);
5521 if (s->cc_op != CC_OP_DYNAMIC)
5522 gen_op_set_cc_op(s->cc_op);
5523 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5524 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5525 }
5526 s->cc_op = CC_OP_EFLAGS;
5527 break;
5528
5529 /**************************/
5530 /* push/pop */
5531 case 0x50 ... 0x57: /* push */
5532 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5533 gen_push_T0(s);
5534 break;
5535 case 0x58 ... 0x5f: /* pop */
5536 if (CODE64(s)) {
5537 ot = dflag ? OT_QUAD : OT_WORD;
5538 } else {
5539 ot = dflag + OT_WORD;
5540 }
5541 gen_pop_T0(s);
5542 /* NOTE: order is important for pop %sp */
5543 gen_pop_update(s);
5544 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5545 break;
5546 case 0x60: /* pusha */
5547 if (CODE64(s))
5548 goto illegal_op;
5549 gen_pusha(s);
5550 break;
5551 case 0x61: /* popa */
5552 if (CODE64(s))
5553 goto illegal_op;
5554 gen_popa(s);
5555 break;
5556 case 0x68: /* push Iv */
5557 case 0x6a:
5558 if (CODE64(s)) {
5559 ot = dflag ? OT_QUAD : OT_WORD;
5560 } else {
5561 ot = dflag + OT_WORD;
5562 }
5563 if (b == 0x68)
5564 val = insn_get(s, ot);
5565 else
5566 val = (int8_t)insn_get(s, OT_BYTE);
5567 gen_op_movl_T0_im(val);
5568 gen_push_T0(s);
5569 break;
5570 case 0x8f: /* pop Ev */
5571 if (CODE64(s)) {
5572 ot = dflag ? OT_QUAD : OT_WORD;
5573 } else {
5574 ot = dflag + OT_WORD;
5575 }
5576 modrm = ldub_code(s->pc++);
5577 mod = (modrm >> 6) & 3;
5578 gen_pop_T0(s);
5579 if (mod == 3) {
5580 /* NOTE: order is important for pop %sp */
5581 gen_pop_update(s);
5582 rm = (modrm & 7) | REX_B(s);
5583 gen_op_mov_reg_T0(ot, rm);
5584 } else {
5585 /* NOTE: order is important too for MMU exceptions */
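/* popl_esp_hack presumably tells gen_lea_modrm to account for the stack
   pointer already having been advanced by the pop when the destination
   address is ESP-relative. */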
5586 s->popl_esp_hack = 1 << ot;
5587 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5588 s->popl_esp_hack = 0;
5589 gen_pop_update(s);
5590 }
5591 break;
5592 case 0xc8: /* enter (imm16 frame size, imm8 nesting level) */
5593 {
5594 int level;
5595 val = lduw_code(s->pc);
5596 s->pc += 2;
5597 level = ldub_code(s->pc++);
5598 gen_enter(s, val, level);
5599 }
5600 break;
5601 case 0xc9: /* leave */
5602 /* XXX: exception not precise (ESP is updated before potential exception) */
5603 if (CODE64(s)) {
5604 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5605 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5606 } else if (s->ss32) {
5607 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5608 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5609 } else {
5610 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5611 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5612 }
5613 gen_pop_T0(s);
5614 if (CODE64(s)) {
5615 ot = dflag ? OT_QUAD : OT_WORD;
5616 } else {
5617 ot = dflag + OT_WORD;
5618 }
5619 gen_op_mov_reg_T0(ot, R_EBP);
5620 gen_pop_update(s);
5621 break;
5622 case 0x06: /* push es */
5623 case 0x0e: /* push cs */
5624 case 0x16: /* push ss */
5625 case 0x1e: /* push ds */
5626 if (CODE64(s))
5627 goto illegal_op;
5628 gen_op_movl_T0_seg(b >> 3);
5629 gen_push_T0(s);
5630 break;
5631 case 0x1a0: /* push fs */
5632 case 0x1a8: /* push gs */
5633 gen_op_movl_T0_seg((b >> 3) & 7);
5634 gen_push_T0(s);
5635 break;
5636 case 0x07: /* pop es */
5637 case 0x17: /* pop ss */
5638 case 0x1f: /* pop ds */
5639 if (CODE64(s))
5640 goto illegal_op;
5641 reg = b >> 3;
5642 gen_pop_T0(s);
5643 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5644 gen_pop_update(s);
5645 if (reg == R_SS) {
5646 /* if reg == SS, inhibit interrupts/trace. */
5647 /* If several instructions disable interrupts, only the
5648 _first_ does it */
5649 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5650 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5651 s->tf = 0;
5652 }
5653 if (s->is_jmp) {
5654 gen_jmp_im(s->pc - s->cs_base);
5655 gen_eob(s);
5656 }
5657 break;
5658 case 0x1a1: /* pop fs */
5659 case 0x1a9: /* pop gs */
5660 gen_pop_T0(s);
5661 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5662 gen_pop_update(s);
5663 if (s->is_jmp) {
5664 gen_jmp_im(s->pc - s->cs_base);
5665 gen_eob(s);
5666 }
5667 break;
5668
5669 /**************************/
5670 /* mov */
5671 case 0x88:
5672 case 0x89: /* mov Gv, Ev */
5673 if ((b & 1) == 0)
5674 ot = OT_BYTE;
5675 else
5676 ot = dflag + OT_WORD;
5677 modrm = ldub_code(s->pc++);
5678 reg = ((modrm >> 3) & 7) | rex_r;
5679
5680 /* generate a generic store */
5681 gen_ldst_modrm(s, modrm, ot, reg, 1);
5682 break;
5683 case 0xc6:
5684 case 0xc7: /* mov Ev, Iv */
5685 if ((b & 1) == 0)
5686 ot = OT_BYTE;
5687 else
5688 ot = dflag + OT_WORD;
5689 modrm = ldub_code(s->pc++);
5690 mod = (modrm >> 6) & 3;
5691 if (mod != 3) {
5692 s->rip_offset = insn_const_size(ot);
5693 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5694 }
5695 val = insn_get(s, ot);
5696 gen_op_movl_T0_im(val);
5697 if (mod != 3)
5698 gen_op_st_T0_A0(ot + s->mem_index);
5699 else
5700 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5701 break;
5702 case 0x8a:
5703 case 0x8b: /* mov Ev, Gv */
5704#ifdef VBOX /* dtrace hot fix */
5705 if (prefixes & PREFIX_LOCK)
5706 goto illegal_op;
5707#endif
5708 if ((b & 1) == 0)
5709 ot = OT_BYTE;
5710 else
5711 ot = OT_WORD + dflag;
5712 modrm = ldub_code(s->pc++);
5713 reg = ((modrm >> 3) & 7) | rex_r;
5714
5715 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5716 gen_op_mov_reg_T0(ot, reg);
5717 break;
5718 case 0x8e: /* mov seg, Gv */
5719 modrm = ldub_code(s->pc++);
5720 reg = (modrm >> 3) & 7;
5721 if (reg >= 6 || reg == R_CS)
5722 goto illegal_op;
5723 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5724 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5725 if (reg == R_SS) {
5726 /* if reg == SS, inhibit interrupts/trace */
5727 /* If several instructions disable interrupts, only the
5728 _first_ does it */
5729 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5730 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5731 s->tf = 0;
5732 }
5733 if (s->is_jmp) {
5734 gen_jmp_im(s->pc - s->cs_base);
5735 gen_eob(s);
5736 }
5737 break;
5738 case 0x8c: /* mov Gv, seg */
5739 modrm = ldub_code(s->pc++);
5740 reg = (modrm >> 3) & 7;
5741 mod = (modrm >> 6) & 3;
5742 if (reg >= 6)
5743 goto illegal_op;
5744 gen_op_movl_T0_seg(reg);
5745 if (mod == 3)
5746 ot = OT_WORD + dflag;
5747 else
5748 ot = OT_WORD;
5749 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5750 break;
5751
5752 case 0x1b6: /* movzbS Gv, Eb */
5753 case 0x1b7: /* movzwS Gv, Ew */
5754 case 0x1be: /* movsbS Gv, Eb */
5755 case 0x1bf: /* movswS Gv, Ew */
5756 {
5757 int d_ot;
5758 /* d_ot is the size of destination */
5759 d_ot = dflag + OT_WORD;
5760 /* ot is the size of source */
5761 ot = (b & 1) + OT_BYTE;
5762 modrm = ldub_code(s->pc++);
5763 reg = ((modrm >> 3) & 7) | rex_r;
5764 mod = (modrm >> 6) & 3;
5765 rm = (modrm & 7) | REX_B(s);
5766
5767 if (mod == 3) {
5768 gen_op_mov_TN_reg(ot, 0, rm);
5769 switch(ot | (b & 8)) {
5770 case OT_BYTE:
5771 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5772 break;
5773 case OT_BYTE | 8:
5774 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5775 break;
5776 case OT_WORD:
5777 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5778 break;
5779 default:
5780 case OT_WORD | 8:
5781 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5782 break;
5783 }
5784 gen_op_mov_reg_T0(d_ot, reg);
5785 } else {
5786 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5787 if (b & 8) {
5788 gen_op_lds_T0_A0(ot + s->mem_index);
5789 } else {
5790 gen_op_ldu_T0_A0(ot + s->mem_index);
5791 }
5792 gen_op_mov_reg_T0(d_ot, reg);
5793 }
5794 }
5795 break;
5796
5797 case 0x8d: /* lea */
5798 ot = dflag + OT_WORD;
5799 modrm = ldub_code(s->pc++);
5800 mod = (modrm >> 6) & 3;
5801 if (mod == 3)
5802 goto illegal_op;
5803 reg = ((modrm >> 3) & 7) | rex_r;
5804 /* we must ensure that no segment is added */
5805 s->override = -1;
5806 val = s->addseg;
5807 s->addseg = 0;
5808 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5809 s->addseg = val;
5810 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5811 break;
5812
5813 case 0xa0: /* mov EAX, Ov */
5814 case 0xa1:
5815 case 0xa2: /* mov Ov, EAX */
5816 case 0xa3:
5817 {
5818 target_ulong offset_addr;
5819
5820 if ((b & 1) == 0)
5821 ot = OT_BYTE;
5822 else
5823 ot = dflag + OT_WORD;
5824#ifdef TARGET_X86_64
5825 if (s->aflag == 2) {
5826 offset_addr = ldq_code(s->pc);
5827 s->pc += 8;
5828 gen_op_movq_A0_im(offset_addr);
5829 } else
5830#endif
5831 {
5832 if (s->aflag) {
5833 offset_addr = insn_get(s, OT_LONG);
5834 } else {
5835 offset_addr = insn_get(s, OT_WORD);
5836 }
5837 gen_op_movl_A0_im(offset_addr);
5838 }
5839 gen_add_A0_ds_seg(s);
5840 if ((b & 2) == 0) {
5841 gen_op_ld_T0_A0(ot + s->mem_index);
5842 gen_op_mov_reg_T0(ot, R_EAX);
5843 } else {
5844 gen_op_mov_TN_reg(ot, 0, R_EAX);
5845 gen_op_st_T0_A0(ot + s->mem_index);
5846 }
5847 }
5848 break;
5849 case 0xd7: /* xlat: AL = [seg:rBX + AL] */
5850#ifdef TARGET_X86_64
5851 if (s->aflag == 2) {
5852 gen_op_movq_A0_reg(R_EBX);
5853 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5854 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5855 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5856 } else
5857#endif
5858 {
5859 gen_op_movl_A0_reg(R_EBX);
5860 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5861 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5862 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5863 if (s->aflag == 0)
5864 gen_op_andl_A0_ffff();
5865 else
5866 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5867 }
5868 gen_add_A0_ds_seg(s);
5869 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5870 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5871 break;
5872 case 0xb0 ... 0xb7: /* mov R, Ib */
5873 val = insn_get(s, OT_BYTE);
5874 gen_op_movl_T0_im(val);
5875 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5876 break;
5877 case 0xb8 ... 0xbf: /* mov R, Iv */
5878#ifdef TARGET_X86_64
5879 if (dflag == 2) {
5880 uint64_t tmp;
5881 /* 64 bit case */
5882 tmp = ldq_code(s->pc);
5883 s->pc += 8;
5884 reg = (b & 7) | REX_B(s);
5885 gen_movtl_T0_im(tmp);
5886 gen_op_mov_reg_T0(OT_QUAD, reg);
5887 } else
5888#endif
5889 {
5890 ot = dflag ? OT_LONG : OT_WORD;
5891 val = insn_get(s, ot);
5892 reg = (b & 7) | REX_B(s);
5893 gen_op_movl_T0_im(val);
5894 gen_op_mov_reg_T0(ot, reg);
5895 }
5896 break;
5897
5898 case 0x91 ... 0x97: /* xchg R, EAX */
5899 ot = dflag + OT_WORD;
5900 reg = (b & 7) | REX_B(s);
5901 rm = R_EAX;
5902 goto do_xchg_reg;
5903 case 0x86:
5904 case 0x87: /* xchg Ev, Gv */
5905 if ((b & 1) == 0)
5906 ot = OT_BYTE;
5907 else
5908 ot = dflag + OT_WORD;
5909 modrm = ldub_code(s->pc++);
5910 reg = ((modrm >> 3) & 7) | rex_r;
5911 mod = (modrm >> 6) & 3;
5912 if (mod == 3) {
5913 rm = (modrm & 7) | REX_B(s);
5914 do_xchg_reg:
5915 gen_op_mov_TN_reg(ot, 0, reg);
5916 gen_op_mov_TN_reg(ot, 1, rm);
5917 gen_op_mov_reg_T0(ot, rm);
5918 gen_op_mov_reg_T1(ot, reg);
5919 } else {
5920 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5921 gen_op_mov_TN_reg(ot, 0, reg);
5922 /* for xchg, lock is implicit */
5923 if (!(prefixes & PREFIX_LOCK))
5924 tcg_gen_helper_0_0(helper_lock);
5925 gen_op_ld_T1_A0(ot + s->mem_index);
5926 gen_op_st_T0_A0(ot + s->mem_index);
5927 if (!(prefixes & PREFIX_LOCK))
5928 tcg_gen_helper_0_0(helper_unlock);
5929 gen_op_mov_reg_T1(ot, reg);
5930 }
5931 break;
5932 case 0xc4: /* les Gv */
5933 if (CODE64(s))
5934 goto illegal_op;
5935 op = R_ES;
5936 goto do_lxx;
5937 case 0xc5: /* lds Gv */
5938 if (CODE64(s))
5939 goto illegal_op;
5940 op = R_DS;
5941 goto do_lxx;
5942 case 0x1b2: /* lss Gv */
5943 op = R_SS;
5944 goto do_lxx;
5945 case 0x1b4: /* lfs Gv */
5946 op = R_FS;
5947 goto do_lxx;
5948 case 0x1b5: /* lgs Gv */
5949 op = R_GS;
5950 do_lxx:
5951 ot = dflag ? OT_LONG : OT_WORD;
5952 modrm = ldub_code(s->pc++);
5953 reg = ((modrm >> 3) & 7) | rex_r;
5954 mod = (modrm >> 6) & 3;
5955 if (mod == 3)
5956 goto illegal_op;
5957 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5958 gen_op_ld_T1_A0(ot + s->mem_index);
5959 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5960 /* load the segment first to handle exceptions properly */
5961 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5962 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5963 /* then put the data */
5964 gen_op_mov_reg_T1(ot, reg);
5965 if (s->is_jmp) {
5966 gen_jmp_im(s->pc - s->cs_base);
5967 gen_eob(s);
5968 }
5969 break;
5970
5971 /************************/
5972 /* shifts */
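/* 'shift' selects where the count comes from: 0 = CL, 1 = the constant
   one, 2 = an imm8 fetched below. */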
5973 case 0xc0:
5974 case 0xc1:
5975 /* shift Ev,Ib */
5976 shift = 2;
5977 grp2:
5978 {
5979 if ((b & 1) == 0)
5980 ot = OT_BYTE;
5981 else
5982 ot = dflag + OT_WORD;
5983
5984 modrm = ldub_code(s->pc++);
5985 mod = (modrm >> 6) & 3;
5986 op = (modrm >> 3) & 7;
5987
5988 if (mod != 3) {
5989 if (shift == 2) {
5990 s->rip_offset = 1;
5991 }
5992 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5993 opreg = OR_TMP0;
5994 } else {
5995 opreg = (modrm & 7) | REX_B(s);
5996 }
5997
5998 /* simpler op */
5999 if (shift == 0) {
6000 gen_shift(s, op, ot, opreg, OR_ECX);
6001 } else {
6002 if (shift == 2) {
6003 shift = ldub_code(s->pc++);
6004 }
6005 gen_shifti(s, op, ot, opreg, shift);
6006 }
6007 }
6008 break;
6009 case 0xd0:
6010 case 0xd1:
6011 /* shift Ev,1 */
6012 shift = 1;
6013 goto grp2;
6014 case 0xd2:
6015 case 0xd3:
6016 /* shift Ev,cl */
6017 shift = 0;
6018 goto grp2;
6019
6020 case 0x1a4: /* shld imm */
6021 op = 0;
6022 shift = 1;
6023 goto do_shiftd;
6024 case 0x1a5: /* shld cl */
6025 op = 0;
6026 shift = 0;
6027 goto do_shiftd;
6028 case 0x1ac: /* shrd imm */
6029 op = 1;
6030 shift = 1;
6031 goto do_shiftd;
6032 case 0x1ad: /* shrd cl */
6033 op = 1;
6034 shift = 0;
6035 do_shiftd:
6036 ot = dflag + OT_WORD;
6037 modrm = ldub_code(s->pc++);
6038 mod = (modrm >> 6) & 3;
6039 rm = (modrm & 7) | REX_B(s);
6040 reg = ((modrm >> 3) & 7) | rex_r;
6041 if (mod != 3) {
6042 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6043 opreg = OR_TMP0;
6044 } else {
6045 opreg = rm;
6046 }
6047 gen_op_mov_TN_reg(ot, 1, reg);
6048
6049 if (shift) {
6050 val = ldub_code(s->pc++);
6051 tcg_gen_movi_tl(cpu_T3, val);
6052 } else {
6053 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6054 }
6055 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6056 break;
6057
6058 /************************/
6059 /* floats */
6060 case 0xd8 ... 0xdf:
6061 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6062 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6063 /* XXX: what to do if illegal op? */
6064 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6065 break;
6066 }
6067 modrm = ldub_code(s->pc++);
6068 mod = (modrm >> 6) & 3;
6069 rm = modrm & 7;
6070 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
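/* combine the low 3 bits of the 0xd8..0xdf escape byte with the ModRM
   reg field into a single 6-bit FPU operation number. */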
6071 if (mod != 3) {
6072 /* memory op */
6073 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6074 switch(op) {
6075 case 0x00 ... 0x07: /* fxxxs */
6076 case 0x10 ... 0x17: /* fixxxl */
6077 case 0x20 ... 0x27: /* fxxxl */
6078 case 0x30 ... 0x37: /* fixxx */
6079 {
6080 int op1;
6081 op1 = op & 7;
6082
6083 switch(op >> 4) {
6084 case 0:
6085 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6086 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6087 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6088 break;
6089 case 1:
6090 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6091 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6092 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6093 break;
6094 case 2:
6095 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6096 (s->mem_index >> 2) - 1);
6097 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6098 break;
6099 case 3:
6100 default:
6101 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6102 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6103 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6104 break;
6105 }
6106
6107 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6108 if (op1 == 3) {
6109 /* fcomp needs pop */
6110 tcg_gen_helper_0_0(helper_fpop);
6111 }
6112 }
6113 break;
6114 case 0x08: /* flds */
6115 case 0x0a: /* fsts */
6116 case 0x0b: /* fstps */
6117 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6118 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6119 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6120 switch(op & 7) {
6121 case 0:
6122 switch(op >> 4) {
6123 case 0:
6124 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6125 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6126 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6127 break;
6128 case 1:
6129 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6130 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6131 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6132 break;
6133 case 2:
6134 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6135 (s->mem_index >> 2) - 1);
6136 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6137 break;
6138 case 3:
6139 default:
6140 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6141 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6142 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6143 break;
6144 }
6145 break;
6146 case 1:
6147 /* XXX: the corresponding CPUID bit (SSE3, for fisttp) must be tested! */
6148 switch(op >> 4) {
6149 case 1:
6150 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6151 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6152 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6153 break;
6154 case 2:
6155 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6156 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6157 (s->mem_index >> 2) - 1);
6158 break;
6159 case 3:
6160 default:
6161 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6162 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6163 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6164 break;
6165 }
6166 tcg_gen_helper_0_0(helper_fpop);
6167 break;
6168 default:
6169 switch(op >> 4) {
6170 case 0:
6171 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6172 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6173 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6174 break;
6175 case 1:
6176 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6177 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6178 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6179 break;
6180 case 2:
6181 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6182 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6183 (s->mem_index >> 2) - 1);
6184 break;
6185 case 3:
6186 default:
6187 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6188 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6189 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6190 break;
6191 }
6192 if ((op & 7) == 3)
6193 tcg_gen_helper_0_0(helper_fpop);
6194 break;
6195 }
6196 break;
6197 case 0x0c: /* fldenv mem */
6198 if (s->cc_op != CC_OP_DYNAMIC)
6199 gen_op_set_cc_op(s->cc_op);
6200 gen_jmp_im(pc_start - s->cs_base);
6201 tcg_gen_helper_0_2(helper_fldenv,
6202 cpu_A0, tcg_const_i32(s->dflag));
6203 break;
6204 case 0x0d: /* fldcw mem */
6205 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6206 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6207 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6208 break;
6209 case 0x0e: /* fnstenv mem */
6210 if (s->cc_op != CC_OP_DYNAMIC)
6211 gen_op_set_cc_op(s->cc_op);
6212 gen_jmp_im(pc_start - s->cs_base);
6213 tcg_gen_helper_0_2(helper_fstenv,
6214 cpu_A0, tcg_const_i32(s->dflag));
6215 break;
6216 case 0x0f: /* fnstcw mem */
6217 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6218 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6219 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6220 break;
6221 case 0x1d: /* fldt mem */
6222 if (s->cc_op != CC_OP_DYNAMIC)
6223 gen_op_set_cc_op(s->cc_op);
6224 gen_jmp_im(pc_start - s->cs_base);
6225 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6226 break;
6227 case 0x1f: /* fstpt mem */
6228 if (s->cc_op != CC_OP_DYNAMIC)
6229 gen_op_set_cc_op(s->cc_op);
6230 gen_jmp_im(pc_start - s->cs_base);
6231 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6232 tcg_gen_helper_0_0(helper_fpop);
6233 break;
6234 case 0x2c: /* frstor mem */
6235 if (s->cc_op != CC_OP_DYNAMIC)
6236 gen_op_set_cc_op(s->cc_op);
6237 gen_jmp_im(pc_start - s->cs_base);
6238 tcg_gen_helper_0_2(helper_frstor,
6239 cpu_A0, tcg_const_i32(s->dflag));
6240 break;
6241 case 0x2e: /* fnsave mem */
6242 if (s->cc_op != CC_OP_DYNAMIC)
6243 gen_op_set_cc_op(s->cc_op);
6244 gen_jmp_im(pc_start - s->cs_base);
6245 tcg_gen_helper_0_2(helper_fsave,
6246 cpu_A0, tcg_const_i32(s->dflag));
6247 break;
6248 case 0x2f: /* fnstsw mem */
6249 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6250 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6251 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6252 break;
6253 case 0x3c: /* fbld */
6254 if (s->cc_op != CC_OP_DYNAMIC)
6255 gen_op_set_cc_op(s->cc_op);
6256 gen_jmp_im(pc_start - s->cs_base);
6257 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6258 break;
6259 case 0x3e: /* fbstp */
6260 if (s->cc_op != CC_OP_DYNAMIC)
6261 gen_op_set_cc_op(s->cc_op);
6262 gen_jmp_im(pc_start - s->cs_base);
6263 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6264 tcg_gen_helper_0_0(helper_fpop);
6265 break;
6266 case 0x3d: /* fildll */
6267 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6268 (s->mem_index >> 2) - 1);
6269 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6270 break;
6271 case 0x3f: /* fistpll */
6272 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6273 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6274 (s->mem_index >> 2) - 1);
6275 tcg_gen_helper_0_0(helper_fpop);
6276 break;
6277 default:
6278 goto illegal_op;
6279 }
6280 } else {
6281 /* register float ops */
6282 opreg = rm;
6283
6284 switch(op) {
6285 case 0x08: /* fld sti */
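/* helper_fpush moves the FPU stack top, so the source index is biased by one
   to still name the original ST(i) */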
6286 tcg_gen_helper_0_0(helper_fpush);
6287 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6288 break;
6289 case 0x09: /* fxchg sti */
6290 case 0x29: /* fxchg4 sti, undocumented op */
6291 case 0x39: /* fxchg7 sti, undocumented op */
6292 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6293 break;
6294 case 0x0a: /* grp d9/2 */
6295 switch(rm) {
6296 case 0: /* fnop */
6297 /* check exceptions (FreeBSD FPU probe) */
6298 if (s->cc_op != CC_OP_DYNAMIC)
6299 gen_op_set_cc_op(s->cc_op);
6300 gen_jmp_im(pc_start - s->cs_base);
6301 tcg_gen_helper_0_0(helper_fwait);
6302 break;
6303 default:
6304 goto illegal_op;
6305 }
6306 break;
6307 case 0x0c: /* grp d9/4 */
6308 switch(rm) {
6309 case 0: /* fchs */
6310 tcg_gen_helper_0_0(helper_fchs_ST0);
6311 break;
6312 case 1: /* fabs */
6313 tcg_gen_helper_0_0(helper_fabs_ST0);
6314 break;
6315 case 4: /* ftst */
6316 tcg_gen_helper_0_0(helper_fldz_FT0);
6317 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6318 break;
6319 case 5: /* fxam */
6320 tcg_gen_helper_0_0(helper_fxam_ST0);
6321 break;
6322 default:
6323 goto illegal_op;
6324 }
6325 break;
6326 case 0x0d: /* grp d9/5 */
6327 {
6328 switch(rm) {
6329 case 0: /* fld1 */
6330 tcg_gen_helper_0_0(helper_fpush);
6331 tcg_gen_helper_0_0(helper_fld1_ST0);
6332 break;
6333 case 1: /* fldl2t */
6334 tcg_gen_helper_0_0(helper_fpush);
6335 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6336 break;
6337 case 2: /* fldl2e */
6338 tcg_gen_helper_0_0(helper_fpush);
6339 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6340 break;
6341 case 3: /* fldpi */
6342 tcg_gen_helper_0_0(helper_fpush);
6343 tcg_gen_helper_0_0(helper_fldpi_ST0);
6344 break;
6345 case 4: /* fldlg2 */
6346 tcg_gen_helper_0_0(helper_fpush);
6347 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6348 break;
6349 case 5: /* fldln2 */
6350 tcg_gen_helper_0_0(helper_fpush);
6351 tcg_gen_helper_0_0(helper_fldln2_ST0);
6352 break;
6353 case 6: /* fldz */
6354 tcg_gen_helper_0_0(helper_fpush);
6355 tcg_gen_helper_0_0(helper_fldz_ST0);
6356 break;
6357 default:
6358 goto illegal_op;
6359 }
6360 }
6361 break;
6362 case 0x0e: /* grp d9/6 */
6363 switch(rm) {
6364 case 0: /* f2xm1 */
6365 tcg_gen_helper_0_0(helper_f2xm1);
6366 break;
6367 case 1: /* fyl2x */
6368 tcg_gen_helper_0_0(helper_fyl2x);
6369 break;
6370 case 2: /* fptan */
6371 tcg_gen_helper_0_0(helper_fptan);
6372 break;
6373 case 3: /* fpatan */
6374 tcg_gen_helper_0_0(helper_fpatan);
6375 break;
6376 case 4: /* fxtract */
6377 tcg_gen_helper_0_0(helper_fxtract);
6378 break;
6379 case 5: /* fprem1 */
6380 tcg_gen_helper_0_0(helper_fprem1);
6381 break;
6382 case 6: /* fdecstp */
6383 tcg_gen_helper_0_0(helper_fdecstp);
6384 break;
6385 default:
6386 case 7: /* fincstp */
6387 tcg_gen_helper_0_0(helper_fincstp);
6388 break;
6389 }
6390 break;
6391 case 0x0f: /* grp d9/7 */
6392 switch(rm) {
6393 case 0: /* fprem */
6394 tcg_gen_helper_0_0(helper_fprem);
6395 break;
6396 case 1: /* fyl2xp1 */
6397 tcg_gen_helper_0_0(helper_fyl2xp1);
6398 break;
6399 case 2: /* fsqrt */
6400 tcg_gen_helper_0_0(helper_fsqrt);
6401 break;
6402 case 3: /* fsincos */
6403 tcg_gen_helper_0_0(helper_fsincos);
6404 break;
6405 case 5: /* fscale */
6406 tcg_gen_helper_0_0(helper_fscale);
6407 break;
6408 case 4: /* frndint */
6409 tcg_gen_helper_0_0(helper_frndint);
6410 break;
6411 case 6: /* fsin */
6412 tcg_gen_helper_0_0(helper_fsin);
6413 break;
6414 default:
6415 case 7: /* fcos */
6416 tcg_gen_helper_0_0(helper_fcos);
6417 break;
6418 }
6419 break;
6420 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6421 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6422 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6423 {
6424 int op1;
6425
6426 op1 = op & 7;
6427 if (op >= 0x20) {
6428 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6429 if (op >= 0x30)
6430 tcg_gen_helper_0_0(helper_fpop);
6431 } else {
6432 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6433 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6434 }
6435 }
6436 break;
6437 case 0x02: /* fcom */
6438 case 0x22: /* fcom2, undocumented op */
6439 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6440 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6441 break;
6442 case 0x03: /* fcomp */
6443 case 0x23: /* fcomp3, undocumented op */
6444 case 0x32: /* fcomp5, undocumented op */
6445 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6446 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6447 tcg_gen_helper_0_0(helper_fpop);
6448 break;
6449 case 0x15: /* da/5 */
6450 switch(rm) {
6451 case 1: /* fucompp */
6452 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6453 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6454 tcg_gen_helper_0_0(helper_fpop);
6455 tcg_gen_helper_0_0(helper_fpop);
6456 break;
6457 default:
6458 goto illegal_op;
6459 }
6460 break;
6461 case 0x1c:
6462 switch(rm) {
6463 case 0: /* feni (287 only, just do nop here) */
6464 break;
6465 case 1: /* fdisi (287 only, just do nop here) */
6466 break;
6467 case 2: /* fclex */
6468 tcg_gen_helper_0_0(helper_fclex);
6469 break;
6470 case 3: /* fninit */
6471 tcg_gen_helper_0_0(helper_fninit);
6472 break;
6473 case 4: /* fsetpm (287 only, just do nop here) */
6474 break;
6475 default:
6476 goto illegal_op;
6477 }
6478 break;
6479 case 0x1d: /* fucomi */
6480 if (s->cc_op != CC_OP_DYNAMIC)
6481 gen_op_set_cc_op(s->cc_op);
6482 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6483 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6484 s->cc_op = CC_OP_EFLAGS;
6485 break;
6486 case 0x1e: /* fcomi */
6487 if (s->cc_op != CC_OP_DYNAMIC)
6488 gen_op_set_cc_op(s->cc_op);
6489 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6490 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6491 s->cc_op = CC_OP_EFLAGS;
6492 break;
6493 case 0x28: /* ffree sti */
6494 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6495 break;
6496 case 0x2a: /* fst sti */
6497 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6498 break;
6499 case 0x2b: /* fstp sti */
6500 case 0x0b: /* fstp1 sti, undocumented op */
6501 case 0x3a: /* fstp8 sti, undocumented op */
6502 case 0x3b: /* fstp9 sti, undocumented op */
6503 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6504 tcg_gen_helper_0_0(helper_fpop);
6505 break;
6506 case 0x2c: /* fucom st(i) */
6507 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6508 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6509 break;
6510 case 0x2d: /* fucomp st(i) */
6511 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6512 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6513 tcg_gen_helper_0_0(helper_fpop);
6514 break;
6515 case 0x33: /* de/3 */
6516 switch(rm) {
6517 case 1: /* fcompp */
6518 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6519 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6520 tcg_gen_helper_0_0(helper_fpop);
6521 tcg_gen_helper_0_0(helper_fpop);
6522 break;
6523 default:
6524 goto illegal_op;
6525 }
6526 break;
6527 case 0x38: /* ffreep sti, undocumented op */
6528 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6529 tcg_gen_helper_0_0(helper_fpop);
6530 break;
6531 case 0x3c: /* df/4 */
6532 switch(rm) {
6533 case 0:
6534 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6535 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6536 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6537 break;
6538 default:
6539 goto illegal_op;
6540 }
6541 break;
6542 case 0x3d: /* fucomip */
6543 if (s->cc_op != CC_OP_DYNAMIC)
6544 gen_op_set_cc_op(s->cc_op);
6545 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6546 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6547 tcg_gen_helper_0_0(helper_fpop);
6548 s->cc_op = CC_OP_EFLAGS;
6549 break;
6550 case 0x3e: /* fcomip */
6551 if (s->cc_op != CC_OP_DYNAMIC)
6552 gen_op_set_cc_op(s->cc_op);
6553 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6554 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6555 tcg_gen_helper_0_0(helper_fpop);
6556 s->cc_op = CC_OP_EFLAGS;
6557 break;
6558 case 0x10 ... 0x13: /* fcmovxx */
6559 case 0x18 ... 0x1b:
6560 {
6561 int op1, l1;
6562 static const uint8_t fcmov_cc[8] = {
6563 (JCC_B << 1),
6564 (JCC_Z << 1),
6565 (JCC_BE << 1),
6566 (JCC_P << 1),
6567 };
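/* bit 3 of the opcode selects the negated condition; op1 is always the
   negation of the fcmov condition, because gen_jcc1 branches to l1
   (skipping the fmov) when the move must not happen */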
6568 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6569 l1 = gen_new_label();
6570 gen_jcc1(s, s->cc_op, op1, l1);
6571 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6572 gen_set_label(l1);
6573 }
6574 break;
6575 default:
6576 goto illegal_op;
6577 }
6578 }
6579 break;
6580 /************************/
6581 /* string ops */
6582
6583 case 0xa4: /* movsS */
6584 case 0xa5:
6585 if ((b & 1) == 0)
6586 ot = OT_BYTE;
6587 else
6588 ot = dflag + OT_WORD;
6589
6590 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6591 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6592 } else {
6593 gen_movs(s, ot);
6594 }
6595 break;
6596
6597 case 0xaa: /* stosS */
6598 case 0xab:
6599 if ((b & 1) == 0)
6600 ot = OT_BYTE;
6601 else
6602 ot = dflag + OT_WORD;
6603
6604 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6605 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6606 } else {
6607 gen_stos(s, ot);
6608 }
6609 break;
6610 case 0xac: /* lodsS */
6611 case 0xad:
6612 if ((b & 1) == 0)
6613 ot = OT_BYTE;
6614 else
6615 ot = dflag + OT_WORD;
6616 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6617 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6618 } else {
6619 gen_lods(s, ot);
6620 }
6621 break;
6622 case 0xae: /* scasS */
6623 case 0xaf:
6624 if ((b & 1) == 0)
6625 ot = OT_BYTE;
6626 else
6627 ot = dflag + OT_WORD;
6628 if (prefixes & PREFIX_REPNZ) {
6629 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6630 } else if (prefixes & PREFIX_REPZ) {
6631 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6632 } else {
6633 gen_scas(s, ot);
6634 s->cc_op = CC_OP_SUBB + ot;
6635 }
6636 break;
6637
6638 case 0xa6: /* cmpsS */
6639 case 0xa7:
6640 if ((b & 1) == 0)
6641 ot = OT_BYTE;
6642 else
6643 ot = dflag + OT_WORD;
6644 if (prefixes & PREFIX_REPNZ) {
6645 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6646 } else if (prefixes & PREFIX_REPZ) {
6647 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6648 } else {
6649 gen_cmps(s, ot);
6650 s->cc_op = CC_OP_SUBB + ot;
6651 }
6652 break;
6653 case 0x6c: /* insS */
6654 case 0x6d:
6655 if ((b & 1) == 0)
6656 ot = OT_BYTE;
6657 else
6658 ot = dflag ? OT_LONG : OT_WORD;
6659 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6660 gen_op_andl_T0_ffff();
6661 gen_check_io(s, ot, pc_start - s->cs_base,
6662 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6663 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6664 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6665 } else {
6666 gen_ins(s, ot);
6667 if (use_icount) {
6668 gen_jmp(s, s->pc - s->cs_base);
6669 }
6670 }
6671 break;
6672 case 0x6e: /* outsS */
6673 case 0x6f:
6674 if ((b & 1) == 0)
6675 ot = OT_BYTE;
6676 else
6677 ot = dflag ? OT_LONG : OT_WORD;
6678 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6679 gen_op_andl_T0_ffff();
6680 gen_check_io(s, ot, pc_start - s->cs_base,
6681 svm_is_rep(prefixes) | 4);
6682 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6683 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6684 } else {
6685 gen_outs(s, ot);
6686 if (use_icount) {
6687 gen_jmp(s, s->pc - s->cs_base);
6688 }
6689 }
6690 break;
6691
6692 /************************/
6693 /* port I/O */
6694
6695 case 0xe4:
6696 case 0xe5:
6697 if ((b & 1) == 0)
6698 ot = OT_BYTE;
6699 else
6700 ot = dflag ? OT_LONG : OT_WORD;
6701 val = ldub_code(s->pc++);
6702 gen_op_movl_T0_im(val);
6703 gen_check_io(s, ot, pc_start - s->cs_base,
6704 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6705 if (use_icount)
6706 gen_io_start();
6707 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6708 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6709 gen_op_mov_reg_T1(ot, R_EAX);
6710 if (use_icount) {
6711 gen_io_end();
6712 gen_jmp(s, s->pc - s->cs_base);
6713 }
6714 break;
6715 case 0xe6:
6716 case 0xe7:
6717 if ((b & 1) == 0)
6718 ot = OT_BYTE;
6719 else
6720 ot = dflag ? OT_LONG : OT_WORD;
6721 val = ldub_code(s->pc++);
6722 gen_op_movl_T0_im(val);
6723 gen_check_io(s, ot, pc_start - s->cs_base,
6724 svm_is_rep(prefixes));
6725#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6726 if (val == 0x80)
6727 break;
6728#endif /* VBOX */
6729 gen_op_mov_TN_reg(ot, 1, R_EAX);
6730
6731 if (use_icount)
6732 gen_io_start();
6733 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6734 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6735 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6736 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6737 if (use_icount) {
6738 gen_io_end();
6739 gen_jmp(s, s->pc - s->cs_base);
6740 }
6741 break;
6742 case 0xec:
6743 case 0xed:
6744 if ((b & 1) == 0)
6745 ot = OT_BYTE;
6746 else
6747 ot = dflag ? OT_LONG : OT_WORD;
6748 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6749 gen_op_andl_T0_ffff();
6750 gen_check_io(s, ot, pc_start - s->cs_base,
6751 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6752 if (use_icount)
6753 gen_io_start();
6754 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6755 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6756 gen_op_mov_reg_T1(ot, R_EAX);
6757 if (use_icount) {
6758 gen_io_end();
6759 gen_jmp(s, s->pc - s->cs_base);
6760 }
6761 break;
6762 case 0xee:
6763 case 0xef:
6764 if ((b & 1) == 0)
6765 ot = OT_BYTE;
6766 else
6767 ot = dflag ? OT_LONG : OT_WORD;
6768 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6769 gen_op_andl_T0_ffff();
6770 gen_check_io(s, ot, pc_start - s->cs_base,
6771 svm_is_rep(prefixes));
6772 gen_op_mov_TN_reg(ot, 1, R_EAX);
6773
6774 if (use_icount)
6775 gen_io_start();
6776 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6777 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6778 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6779 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6780 if (use_icount) {
6781 gen_io_end();
6782 gen_jmp(s, s->pc - s->cs_base);
6783 }
6784 break;
6785
6786 /************************/
6787 /* control */
6788 case 0xc2: /* ret im */
6789 val = lduw_code(s->pc); /* the RETN imm16 operand is unsigned */
6790 s->pc += 2;
6791 gen_pop_T0(s);
6792 if (CODE64(s) && s->dflag)
6793 s->dflag = 2;
6794 gen_stack_update(s, val + (2 << s->dflag));
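/* release the return address (2 << dflag = 2/4/8 bytes) plus the imm16
   extra bytes from the stack */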
6795 if (s->dflag == 0)
6796 gen_op_andl_T0_ffff();
6797 gen_op_jmp_T0();
6798 gen_eob(s);
6799 break;
6800 case 0xc3: /* ret */
6801 gen_pop_T0(s);
6802 gen_pop_update(s);
6803 if (s->dflag == 0)
6804 gen_op_andl_T0_ffff();
6805 gen_op_jmp_T0();
6806 gen_eob(s);
6807 break;
6808 case 0xca: /* lret im */
6809 val = lduw_code(s->pc); /* the RETF imm16 operand is unsigned */
6810 s->pc += 2;
6811 do_lret:
6812 if (s->pe && !s->vm86) {
6813 if (s->cc_op != CC_OP_DYNAMIC)
6814 gen_op_set_cc_op(s->cc_op);
6815 gen_jmp_im(pc_start - s->cs_base);
6816 tcg_gen_helper_0_2(helper_lret_protected,
6817 tcg_const_i32(s->dflag),
6818 tcg_const_i32(val));
6819 } else {
6820 gen_stack_A0(s);
6821 /* pop offset */
6822 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6823 if (s->dflag == 0)
6824 gen_op_andl_T0_ffff();
6825 /* NOTE: keeping EIP updated is not a problem in case of
6826 exception */
6827 gen_op_jmp_T0();
6828 /* pop selector */
6829 gen_op_addl_A0_im(2 << s->dflag);
6830 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6831 gen_op_movl_seg_T0_vm(R_CS);
6832 /* add stack offset */
6833 gen_stack_update(s, val + (4 << s->dflag));
6834 }
6835 gen_eob(s);
6836 break;
6837 case 0xcb: /* lret */
6838 val = 0;
6839 goto do_lret;
6840 case 0xcf: /* iret */
6841 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6842 if (!s->pe) {
6843 /* real mode */
6844 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6845 s->cc_op = CC_OP_EFLAGS;
6846 } else if (s->vm86) {
6847#ifdef VBOX
6848 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6849#else
6850 if (s->iopl != 3) {
6851#endif
6852 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6853 } else {
6854 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6855 s->cc_op = CC_OP_EFLAGS;
6856 }
6857 } else {
6858 if (s->cc_op != CC_OP_DYNAMIC)
6859 gen_op_set_cc_op(s->cc_op);
6860 gen_jmp_im(pc_start - s->cs_base);
6861 tcg_gen_helper_0_2(helper_iret_protected,
6862 tcg_const_i32(s->dflag),
6863 tcg_const_i32(s->pc - s->cs_base));
6864 s->cc_op = CC_OP_EFLAGS;
6865 }
6866 gen_eob(s);
6867 break;
6868 case 0xe8: /* call im */
6869 {
6870 if (dflag)
6871 tval = (int32_t)insn_get(s, OT_LONG);
6872 else
6873 tval = (int16_t)insn_get(s, OT_WORD);
6874 next_eip = s->pc - s->cs_base;
6875 tval += next_eip;
6876 if (s->dflag == 0)
6877 tval &= 0xffff;
6878 gen_movtl_T0_im(next_eip);
6879 gen_push_T0(s);
6880 gen_jmp(s, tval);
6881 }
6882 break;
6883 case 0x9a: /* lcall im */
6884 {
6885 unsigned int selector, offset;
6886
6887 if (CODE64(s))
6888 goto illegal_op;
6889 ot = dflag ? OT_LONG : OT_WORD;
6890 offset = insn_get(s, ot);
6891 selector = insn_get(s, OT_WORD);
6892
6893 gen_op_movl_T0_im(selector);
6894 gen_op_movl_T1_imu(offset);
6895 }
6896 goto do_lcall;
6897 case 0xe9: /* jmp im */
6898 if (dflag)
6899 tval = (int32_t)insn_get(s, OT_LONG);
6900 else
6901 tval = (int16_t)insn_get(s, OT_WORD);
6902 tval += s->pc - s->cs_base;
6903 if (s->dflag == 0)
6904 tval &= 0xffff;
6905 else if(!CODE64(s))
6906 tval &= 0xffffffff;
6907 gen_jmp(s, tval);
6908 break;
6909 case 0xea: /* ljmp im */
6910 {
6911 unsigned int selector, offset;
6912
6913 if (CODE64(s))
6914 goto illegal_op;
6915 ot = dflag ? OT_LONG : OT_WORD;
6916 offset = insn_get(s, ot);
6917 selector = insn_get(s, OT_WORD);
6918
6919 gen_op_movl_T0_im(selector);
6920 gen_op_movl_T1_imu(offset);
6921 }
6922 goto do_ljmp;
6923 case 0xeb: /* jmp Jb */
6924 tval = (int8_t)insn_get(s, OT_BYTE);
6925 tval += s->pc - s->cs_base;
6926 if (s->dflag == 0)
6927 tval &= 0xffff;
6928 gen_jmp(s, tval);
6929 break;
6930 case 0x70 ... 0x7f: /* jcc Jb */
6931 tval = (int8_t)insn_get(s, OT_BYTE);
6932 goto do_jcc;
6933 case 0x180 ... 0x18f: /* jcc Jv */
6934 if (dflag) {
6935 tval = (int32_t)insn_get(s, OT_LONG);
6936 } else {
6937 tval = (int16_t)insn_get(s, OT_WORD);
6938 }
6939 do_jcc:
6940 next_eip = s->pc - s->cs_base;
6941 tval += next_eip;
6942 if (s->dflag == 0)
6943 tval &= 0xffff;
6944 gen_jcc(s, b, tval, next_eip);
6945 break;
6946
6947 case 0x190 ... 0x19f: /* setcc Gv */
6948 modrm = ldub_code(s->pc++);
6949 gen_setcc(s, b);
6950 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6951 break;
6952 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6953 {
6954 int l1;
6955 TCGv t0;
6956
6957 ot = dflag + OT_WORD;
6958 modrm = ldub_code(s->pc++);
6959 reg = ((modrm >> 3) & 7) | rex_r;
6960 mod = (modrm >> 6) & 3;
6961 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6962 if (mod != 3) {
6963 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6964 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6965 } else {
6966 rm = (modrm & 7) | REX_B(s);
6967 gen_op_mov_v_reg(ot, t0, rm);
6968 }
6969#ifdef TARGET_X86_64
6970 if (ot == OT_LONG) {
6971 /* XXX: specific Intel behaviour ? */
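/* a 32-bit cmov in 64-bit mode zero-extends the destination even when the
   condition is false, so the high half is cleared unconditionally below */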
6972 l1 = gen_new_label();
6973 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6974 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6975 gen_set_label(l1);
6976 tcg_gen_movi_tl(cpu_tmp0, 0);
6977 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6978 } else
6979#endif
6980 {
6981 l1 = gen_new_label();
6982 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6983 gen_op_mov_reg_v(ot, reg, t0);
6984 gen_set_label(l1);
6985 }
6986 tcg_temp_free(t0);
6987 }
6988 break;
6989
6990 /************************/
6991 /* flags */
6992 case 0x9c: /* pushf */
6993 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6994#ifdef VBOX
6995 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6996#else
6997 if (s->vm86 && s->iopl != 3) {
6998#endif
6999 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7000 } else {
7001 if (s->cc_op != CC_OP_DYNAMIC)
7002 gen_op_set_cc_op(s->cc_op);
7003#ifdef VBOX
7004 if (s->vm86 && s->vme && s->iopl != 3)
7005 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
7006 else
7007#endif
7008 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
7009 gen_push_T0(s);
7010 }
7011 break;
7012 case 0x9d: /* popf */
7013 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
7014#ifdef VBOX
7015 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7016#else
7017 if (s->vm86 && s->iopl != 3) {
7018#endif
7019 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7020 } else {
7021 gen_pop_T0(s);
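/* the writable eflags bits depend on privilege: TF/AC/ID/NT may always be
   changed; CPL 0 may also change IF and IOPL; CPL <= IOPL may also change
   IF; anything else changes neither IF nor IOPL. A 16-bit operand restricts
   the update to the low word. */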
7022 if (s->cpl == 0) {
7023 if (s->dflag) {
7024 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7025 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7026 } else {
7027 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7028 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7029 }
7030 } else {
7031 if (s->cpl <= s->iopl) {
7032 if (s->dflag) {
7033 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7034 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7035 } else {
7036 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7037 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7038 }
7039 } else {
7040 if (s->dflag) {
7041 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7042 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7043 } else {
7044#ifdef VBOX
7045 if (s->vm86 && s->vme)
7046 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7047 else
7048#endif
7049 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7050 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7051 }
7052 }
7053 }
7054 gen_pop_update(s);
7055 s->cc_op = CC_OP_EFLAGS;
7056 /* abort translation because TF flag may change */
7057 gen_jmp_im(s->pc - s->cs_base);
7058 gen_eob(s);
7059 }
7060 break;
7061 case 0x9e: /* sahf */
7062 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7063 goto illegal_op;
7064 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7065 if (s->cc_op != CC_OP_DYNAMIC)
7066 gen_op_set_cc_op(s->cc_op);
7067 gen_compute_eflags(cpu_cc_src);
7068 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7069 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7070 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7071 s->cc_op = CC_OP_EFLAGS;
7072 break;
7073 case 0x9f: /* lahf */
7074 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7075 goto illegal_op;
7076 if (s->cc_op != CC_OP_DYNAMIC)
7077 gen_op_set_cc_op(s->cc_op);
7078 gen_compute_eflags(cpu_T[0]);
7079 /* Note: gen_compute_eflags() only gives the condition codes */
7080 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7081 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7082 break;
7083 case 0xf5: /* cmc */
7084 if (s->cc_op != CC_OP_DYNAMIC)
7085 gen_op_set_cc_op(s->cc_op);
7086 gen_compute_eflags(cpu_cc_src);
7087 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7088 s->cc_op = CC_OP_EFLAGS;
7089 break;
7090 case 0xf8: /* clc */
7091 if (s->cc_op != CC_OP_DYNAMIC)
7092 gen_op_set_cc_op(s->cc_op);
7093 gen_compute_eflags(cpu_cc_src);
7094 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7095 s->cc_op = CC_OP_EFLAGS;
7096 break;
7097 case 0xf9: /* stc */
7098 if (s->cc_op != CC_OP_DYNAMIC)
7099 gen_op_set_cc_op(s->cc_op);
7100 gen_compute_eflags(cpu_cc_src);
7101 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7102 s->cc_op = CC_OP_EFLAGS;
7103 break;
7104 case 0xfc: /* cld */
7105 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7106 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7107 break;
7108 case 0xfd: /* std */
7109 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7110 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7111 break;
7112
7113 /************************/
7114 /* bit operations */
7115 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7116 ot = dflag + OT_WORD;
7117 modrm = ldub_code(s->pc++);
7118 op = (modrm >> 3) & 7;
7119 mod = (modrm >> 6) & 3;
7120 rm = (modrm & 7) | REX_B(s);
7121 if (mod != 3) {
7122 s->rip_offset = 1;
7123 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7124 gen_op_ld_T0_A0(ot + s->mem_index);
7125 } else {
7126 gen_op_mov_TN_reg(ot, 0, rm);
7127 }
7128 /* load shift */
7129 val = ldub_code(s->pc++);
7130 gen_op_movl_T1_im(val);
7131 if (op < 4)
7132 goto illegal_op;
7133 op -= 4;
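/* the modrm reg field 4..7 selects bt/bts/btr/btc; rebase it to 0..3 so the
   shared bt_op code below can be used */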
7134 goto bt_op;
7135 case 0x1a3: /* bt Gv, Ev */
7136 op = 0;
7137 goto do_btx;
7138 case 0x1ab: /* bts */
7139 op = 1;
7140 goto do_btx;
7141 case 0x1b3: /* btr */
7142 op = 2;
7143 goto do_btx;
7144 case 0x1bb: /* btc */
7145 op = 3;
7146 do_btx:
7147 ot = dflag + OT_WORD;
7148 modrm = ldub_code(s->pc++);
7149 reg = ((modrm >> 3) & 7) | rex_r;
7150 mod = (modrm >> 6) & 3;
7151 rm = (modrm & 7) | REX_B(s);
7152 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7153 if (mod != 3) {
7154 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7155 /* memory operand with a register bit offset: the offset may address past the operand, so add the implied (signed) byte displacement to A0 */
7156 gen_exts(ot, cpu_T[1]);
7157 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7158 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7159 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
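/* e.g. bt dword ptr [mem], 100 with ot == OT_LONG: A0 is advanced by
   (100 >> 5) << 2 = 12 bytes and the bit index is reduced to 100 & 31 = 4
   by the mask at bt_op */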
7160 gen_op_ld_T0_A0(ot + s->mem_index);
7161 } else {
7162 gen_op_mov_TN_reg(ot, 0, rm);
7163 }
7164 bt_op:
7165 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7166 switch(op) {
7167 case 0:
7168 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7169 tcg_gen_movi_tl(cpu_cc_dst, 0);
7170 break;
7171 case 1:
7172 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7173 tcg_gen_movi_tl(cpu_tmp0, 1);
7174 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7175 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7176 break;
7177 case 2:
7178 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7179 tcg_gen_movi_tl(cpu_tmp0, 1);
7180 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7181 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7182 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7183 break;
7184 default:
7185 case 3:
7186 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7187 tcg_gen_movi_tl(cpu_tmp0, 1);
7188 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7189 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7190 break;
7191 }
7192 s->cc_op = CC_OP_SARB + ot;
7193 if (op != 0) {
7194 if (mod != 3)
7195 gen_op_st_T0_A0(ot + s->mem_index);
7196 else
7197 gen_op_mov_reg_T0(ot, rm);
7198 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7199 tcg_gen_movi_tl(cpu_cc_dst, 0);
7200 }
7201 break;
7202 case 0x1bc: /* bsf */
7203 case 0x1bd: /* bsr */
7204 {
7205 int label1;
7206 TCGv t0;
7207
7208 ot = dflag + OT_WORD;
7209 modrm = ldub_code(s->pc++);
7210 reg = ((modrm >> 3) & 7) | rex_r;
7211 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7212 gen_extu(ot, cpu_T[0]);
7213 label1 = gen_new_label();
7214 tcg_gen_movi_tl(cpu_cc_dst, 0);
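/* cc_dst stays 0 when the source is zero; with CC_OP_LOGIC this yields
   ZF = 1 and the destination register is left unchanged */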
7215 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7216 tcg_gen_mov_tl(t0, cpu_T[0]);
7217 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7218 if (b & 1) {
7219 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7220 } else {
7221 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7222 }
7223 gen_op_mov_reg_T0(ot, reg);
7224 tcg_gen_movi_tl(cpu_cc_dst, 1);
7225 gen_set_label(label1);
7226 tcg_gen_discard_tl(cpu_cc_src);
7227 s->cc_op = CC_OP_LOGICB + ot;
7228 tcg_temp_free(t0);
7229 }
7230 break;
7231 /************************/
7232 /* bcd */
7233 case 0x27: /* daa */
7234 if (CODE64(s))
7235 goto illegal_op;
7236 if (s->cc_op != CC_OP_DYNAMIC)
7237 gen_op_set_cc_op(s->cc_op);
7238 tcg_gen_helper_0_0(helper_daa);
7239 s->cc_op = CC_OP_EFLAGS;
7240 break;
7241 case 0x2f: /* das */
7242 if (CODE64(s))
7243 goto illegal_op;
7244 if (s->cc_op != CC_OP_DYNAMIC)
7245 gen_op_set_cc_op(s->cc_op);
7246 tcg_gen_helper_0_0(helper_das);
7247 s->cc_op = CC_OP_EFLAGS;
7248 break;
7249 case 0x37: /* aaa */
7250 if (CODE64(s))
7251 goto illegal_op;
7252 if (s->cc_op != CC_OP_DYNAMIC)
7253 gen_op_set_cc_op(s->cc_op);
7254 tcg_gen_helper_0_0(helper_aaa);
7255 s->cc_op = CC_OP_EFLAGS;
7256 break;
7257 case 0x3f: /* aas */
7258 if (CODE64(s))
7259 goto illegal_op;
7260 if (s->cc_op != CC_OP_DYNAMIC)
7261 gen_op_set_cc_op(s->cc_op);
7262 tcg_gen_helper_0_0(helper_aas);
7263 s->cc_op = CC_OP_EFLAGS;
7264 break;
7265 case 0xd4: /* aam */
7266 if (CODE64(s))
7267 goto illegal_op;
7268 val = ldub_code(s->pc++);
7269 if (val == 0) {
7270 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7271 } else {
7272 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7273 s->cc_op = CC_OP_LOGICB;
7274 }
7275 break;
7276 case 0xd5: /* aad */
7277 if (CODE64(s))
7278 goto illegal_op;
7279 val = ldub_code(s->pc++);
7280 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7281 s->cc_op = CC_OP_LOGICB;
7282 break;
7283 /************************/
7284 /* misc */
7285 case 0x90: /* nop */
7286 /* XXX: xchg + rex handling */
7287 /* XXX: correct lock test for all insn */
7288 if (prefixes & PREFIX_LOCK)
7289 goto illegal_op;
7290 if (prefixes & PREFIX_REPZ) {
7291 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7292 }
7293 break;
7294 case 0x9b: /* fwait */
7295 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7296 (HF_MP_MASK | HF_TS_MASK)) {
7297 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7298 } else {
7299 if (s->cc_op != CC_OP_DYNAMIC)
7300 gen_op_set_cc_op(s->cc_op);
7301 gen_jmp_im(pc_start - s->cs_base);
7302 tcg_gen_helper_0_0(helper_fwait);
7303 }
7304 break;
7305 case 0xcc: /* int3 */
7306#ifdef VBOX
7307 if (s->vm86 && s->iopl != 3 && !s->vme) {
7308 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7309 } else
7310#endif
7311 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7312 break;
7313 case 0xcd: /* int N */
7314 val = ldub_code(s->pc++);
7315#ifdef VBOX
7316 if (s->vm86 && s->iopl != 3 && !s->vme) {
7317#else
7318 if (s->vm86 && s->iopl != 3) {
7319#endif
7320 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7321 } else {
7322 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7323 }
7324 break;
7325 case 0xce: /* into */
7326 if (CODE64(s))
7327 goto illegal_op;
7328 if (s->cc_op != CC_OP_DYNAMIC)
7329 gen_op_set_cc_op(s->cc_op);
7330 gen_jmp_im(pc_start - s->cs_base);
7331 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7332 break;
7333 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7334 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7335#if 1
7336 gen_debug(s, pc_start - s->cs_base);
7337#else
7338 /* start debug */
7339 tb_flush(cpu_single_env);
7340 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7341#endif
7342 break;
7343 case 0xfa: /* cli */
7344 if (!s->vm86) {
7345 if (s->cpl <= s->iopl) {
7346 tcg_gen_helper_0_0(helper_cli);
7347 } else {
7348 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7349 }
7350 } else {
7351 if (s->iopl == 3) {
7352 tcg_gen_helper_0_0(helper_cli);
7353#ifdef VBOX
7354 } else if (s->iopl != 3 && s->vme) {
7355 tcg_gen_helper_0_0(helper_cli_vme);
7356#endif
7357 } else {
7358 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7359 }
7360 }
7361 break;
7362 case 0xfb: /* sti */
7363 if (!s->vm86) {
7364 if (s->cpl <= s->iopl) {
7365 gen_sti:
7366 tcg_gen_helper_0_0(helper_sti);
7367 /* interrupts are enabled only after the insn following sti */
7368 /* if several consecutive STIs occur, only the
7369 _first_ one sets the inhibit flag */
7370 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7371 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7372 /* give a chance to handle pending irqs */
7373 gen_jmp_im(s->pc - s->cs_base);
7374 gen_eob(s);
7375 } else {
7376 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7377 }
7378 } else {
7379 if (s->iopl == 3) {
7380 goto gen_sti;
7381#ifdef VBOX
7382 } else if (s->iopl != 3 && s->vme) {
7383 tcg_gen_helper_0_0(helper_sti_vme);
7384 /* give a chance to handle pending irqs */
7385 gen_jmp_im(s->pc - s->cs_base);
7386 gen_eob(s);
7387#endif
7388 } else {
7389 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7390 }
7391 }
7392 break;
7393 case 0x62: /* bound */
7394 if (CODE64(s))
7395 goto illegal_op;
7396 ot = dflag ? OT_LONG : OT_WORD;
7397 modrm = ldub_code(s->pc++);
7398 reg = (modrm >> 3) & 7;
7399 mod = (modrm >> 6) & 3;
7400 if (mod == 3)
7401 goto illegal_op;
7402 gen_op_mov_TN_reg(ot, 0, reg);
7403 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7404 gen_jmp_im(pc_start - s->cs_base);
7405 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7406 if (ot == OT_WORD)
7407 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7408 else
7409 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7410 break;
7411 case 0x1c8 ... 0x1cf: /* bswap reg */
7412 reg = (b & 7) | REX_B(s);
7413#ifdef TARGET_X86_64
7414 if (dflag == 2) {
7415 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7416 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7417 gen_op_mov_reg_T0(OT_QUAD, reg);
7418 } else
7419 {
7420 TCGv tmp0;
7421 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7422
7423 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7424 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7425 tcg_gen_bswap_i32(tmp0, tmp0);
7426 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7427 gen_op_mov_reg_T0(OT_LONG, reg);
7428 }
7429#else
7430 {
7431 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7432 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7433 gen_op_mov_reg_T0(OT_LONG, reg);
7434 }
7435#endif
7436 break;
7437 case 0xd6: /* salc */
7438 if (CODE64(s))
7439 goto illegal_op;
7440 if (s->cc_op != CC_OP_DYNAMIC)
7441 gen_op_set_cc_op(s->cc_op);
7442 gen_compute_eflags_c(cpu_T[0]);
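/* T0 now holds CF (0 or 1); negating gives 0 or -1, so the store below
   sets AL = CF ? 0xff : 0x00 */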
7443 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7444 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7445 break;
7446 case 0xe0: /* loopnz */
7447 case 0xe1: /* loopz */
7448 case 0xe2: /* loop */
7449 case 0xe3: /* jecxz */
7450 {
7451 int l1, l2, l3;
7452
7453 tval = (int8_t)insn_get(s, OT_BYTE);
7454 next_eip = s->pc - s->cs_base;
7455 tval += next_eip;
7456 if (s->dflag == 0)
7457 tval &= 0xffff;
7458
7459 l1 = gen_new_label();
7460 l2 = gen_new_label();
7461 l3 = gen_new_label();
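/* l1: branch taken (jump to tval), l3: branch not taken (fall through to
   next_eip), l2: common exit */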
7462 b &= 3;
7463 switch(b) {
7464 case 0: /* loopnz */
7465 case 1: /* loopz */
7466 if (s->cc_op != CC_OP_DYNAMIC)
7467 gen_op_set_cc_op(s->cc_op);
7468 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7469 gen_op_jz_ecx(s->aflag, l3);
7470 gen_compute_eflags(cpu_tmp0);
7471 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7472 if (b == 0) {
7473 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7474 } else {
7475 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7476 }
7477 break;
7478 case 2: /* loop */
7479 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7480 gen_op_jnz_ecx(s->aflag, l1);
7481 break;
7482 default:
7483 case 3: /* jcxz */
7484 gen_op_jz_ecx(s->aflag, l1);
7485 break;
7486 }
7487
7488 gen_set_label(l3);
7489 gen_jmp_im(next_eip);
7490 tcg_gen_br(l2);
7491
7492 gen_set_label(l1);
7493 gen_jmp_im(tval);
7494 gen_set_label(l2);
7495 gen_eob(s);
7496 }
7497 break;
7498 case 0x130: /* wrmsr */
7499 case 0x132: /* rdmsr */
7500 if (s->cpl != 0) {
7501 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7502 } else {
7503 if (s->cc_op != CC_OP_DYNAMIC)
7504 gen_op_set_cc_op(s->cc_op);
7505 gen_jmp_im(pc_start - s->cs_base);
7506 if (b & 2) {
7507 tcg_gen_helper_0_0(helper_rdmsr);
7508 } else {
7509 tcg_gen_helper_0_0(helper_wrmsr);
7510 }
7511 }
7512 break;
7513 case 0x131: /* rdtsc */
7514 if (s->cc_op != CC_OP_DYNAMIC)
7515 gen_op_set_cc_op(s->cc_op);
7516 gen_jmp_im(pc_start - s->cs_base);
7517 if (use_icount)
7518 gen_io_start();
7519 tcg_gen_helper_0_0(helper_rdtsc);
7520 if (use_icount) {
7521 gen_io_end();
7522 gen_jmp(s, s->pc - s->cs_base);
7523 }
7524 break;
7525 case 0x133: /* rdpmc */
7526 if (s->cc_op != CC_OP_DYNAMIC)
7527 gen_op_set_cc_op(s->cc_op);
7528 gen_jmp_im(pc_start - s->cs_base);
7529 tcg_gen_helper_0_0(helper_rdpmc);
7530 break;
7531 case 0x134: /* sysenter */
7532#ifndef VBOX
7533 /* On Intel CPUs, SYSENTER is valid in 64-bit mode */
7534 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7535#else
7536 /** @todo: make things right */
7537 if (CODE64(s))
7538#endif
7539 goto illegal_op;
7540 if (!s->pe) {
7541 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7542 } else {
7543 if (s->cc_op != CC_OP_DYNAMIC) {
7544 gen_op_set_cc_op(s->cc_op);
7545 s->cc_op = CC_OP_DYNAMIC;
7546 }
7547 gen_jmp_im(pc_start - s->cs_base);
7548 tcg_gen_helper_0_0(helper_sysenter);
7549 gen_eob(s);
7550 }
7551 break;
7552 case 0x135: /* sysexit */
7553#ifndef VBOX
7554 /* On Intel CPUs, SYSEXIT is valid in 64-bit mode */
7555 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7556#else
7557 /** @todo: make things right */
7558 if (CODE64(s))
7559#endif
7560 goto illegal_op;
7561 if (!s->pe) {
7562 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7563 } else {
7564 if (s->cc_op != CC_OP_DYNAMIC) {
7565 gen_op_set_cc_op(s->cc_op);
7566 s->cc_op = CC_OP_DYNAMIC;
7567 }
7568 gen_jmp_im(pc_start - s->cs_base);
7569 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7570 gen_eob(s);
7571 }
7572 break;
7573#ifdef TARGET_X86_64
7574 case 0x105: /* syscall */
7575 /* XXX: is it usable in real mode ? */
7576 if (s->cc_op != CC_OP_DYNAMIC) {
7577 gen_op_set_cc_op(s->cc_op);
7578 s->cc_op = CC_OP_DYNAMIC;
7579 }
7580 gen_jmp_im(pc_start - s->cs_base);
7581 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7582 gen_eob(s);
7583 break;
7584 case 0x107: /* sysret */
7585 if (!s->pe) {
7586 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7587 } else {
7588 if (s->cc_op != CC_OP_DYNAMIC) {
7589 gen_op_set_cc_op(s->cc_op);
7590 s->cc_op = CC_OP_DYNAMIC;
7591 }
7592 gen_jmp_im(pc_start - s->cs_base);
7593 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7594 /* condition codes are modified only in long mode */
7595 if (s->lma)
7596 s->cc_op = CC_OP_EFLAGS;
7597 gen_eob(s);
7598 }
7599 break;
7600#endif
7601 case 0x1a2: /* cpuid */
7602 if (s->cc_op != CC_OP_DYNAMIC)
7603 gen_op_set_cc_op(s->cc_op);
7604 gen_jmp_im(pc_start - s->cs_base);
7605 tcg_gen_helper_0_0(helper_cpuid);
7606 break;
7607 case 0xf4: /* hlt */
7608 if (s->cpl != 0) {
7609 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7610 } else {
7611 if (s->cc_op != CC_OP_DYNAMIC)
7612 gen_op_set_cc_op(s->cc_op);
7613 gen_jmp_im(pc_start - s->cs_base);
7614 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7615 s->is_jmp = 3;
7616 }
7617 break;
7618 case 0x100:
7619 modrm = ldub_code(s->pc++);
7620 mod = (modrm >> 6) & 3;
7621 op = (modrm >> 3) & 7;
7622 switch(op) {
7623 case 0: /* sldt */
7624 if (!s->pe || s->vm86)
7625 goto illegal_op;
7626 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7627 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7628 ot = OT_WORD;
7629 if (mod == 3)
7630 ot += s->dflag;
7631 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7632 break;
7633 case 2: /* lldt */
7634 if (!s->pe || s->vm86)
7635 goto illegal_op;
7636 if (s->cpl != 0) {
7637 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7638 } else {
7639 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7640 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7641 gen_jmp_im(pc_start - s->cs_base);
7642 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7643 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7644 }
7645 break;
7646 case 1: /* str */
7647 if (!s->pe || s->vm86)
7648 goto illegal_op;
7649 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7650 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7651 ot = OT_WORD;
7652 if (mod == 3)
7653 ot += s->dflag;
7654 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7655 break;
7656 case 3: /* ltr */
7657 if (!s->pe || s->vm86)
7658 goto illegal_op;
7659 if (s->cpl != 0) {
7660 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7661 } else {
7662 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7663 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7664 gen_jmp_im(pc_start - s->cs_base);
7665 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7666 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7667 }
7668 break;
7669 case 4: /* verr */
7670 case 5: /* verw */
7671 if (!s->pe || s->vm86)
7672 goto illegal_op;
7673 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7674 if (s->cc_op != CC_OP_DYNAMIC)
7675 gen_op_set_cc_op(s->cc_op);
7676 if (op == 4)
7677 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7678 else
7679 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7680 s->cc_op = CC_OP_EFLAGS;
7681 break;
7682 default:
7683 goto illegal_op;
7684 }
7685 break;
7686 case 0x101:
7687 modrm = ldub_code(s->pc++);
7688 mod = (modrm >> 6) & 3;
7689 op = (modrm >> 3) & 7;
7690 rm = modrm & 7;
7691
7692#ifdef VBOX
7693 /* 0f 01 f9 */
7694 if (modrm == 0xf9)
7695 {
7696 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7697 goto illegal_op;
7698 gen_jmp_im(pc_start - s->cs_base);
7699 tcg_gen_helper_0_0(helper_rdtscp);
7700 break;
7701 }
7702#endif
7703 switch(op) {
7704 case 0: /* sgdt */
7705 if (mod == 3)
7706 goto illegal_op;
7707 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7708 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7709 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7710 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7711 gen_add_A0_im(s, 2);
7712 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7713 if (!s->dflag)
7714 gen_op_andl_T0_im(0xffffff);
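/* with a 16-bit operand size only the low 24 bits of the descriptor table
   base are stored */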
7715 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7716 break;
7717 case 1:
7718 if (mod == 3) {
7719 switch (rm) {
7720 case 0: /* monitor */
7721 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7722 s->cpl != 0)
7723 goto illegal_op;
7724 if (s->cc_op != CC_OP_DYNAMIC)
7725 gen_op_set_cc_op(s->cc_op);
7726 gen_jmp_im(pc_start - s->cs_base);
7727#ifdef TARGET_X86_64
7728 if (s->aflag == 2) {
7729 gen_op_movq_A0_reg(R_EAX);
7730 } else
7731#endif
7732 {
7733 gen_op_movl_A0_reg(R_EAX);
7734 if (s->aflag == 0)
7735 gen_op_andl_A0_ffff();
7736 }
7737 gen_add_A0_ds_seg(s);
7738 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7739 break;
7740 case 1: /* mwait */
7741 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7742 s->cpl != 0)
7743 goto illegal_op;
7744 if (s->cc_op != CC_OP_DYNAMIC) {
7745 gen_op_set_cc_op(s->cc_op);
7746 s->cc_op = CC_OP_DYNAMIC;
7747 }
7748 gen_jmp_im(pc_start - s->cs_base);
7749 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7750 gen_eob(s);
7751 break;
7752 default:
7753 goto illegal_op;
7754 }
7755 } else { /* sidt */
7756 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7757 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7758 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7759 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7760 gen_add_A0_im(s, 2);
7761 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7762 if (!s->dflag)
7763 gen_op_andl_T0_im(0xffffff);
7764 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7765 }
7766 break;
7767 case 2: /* lgdt */
7768 case 3: /* lidt */
7769 if (mod == 3) {
7770 if (s->cc_op != CC_OP_DYNAMIC)
7771 gen_op_set_cc_op(s->cc_op);
7772 gen_jmp_im(pc_start - s->cs_base);
7773 switch(rm) {
7774 case 0: /* VMRUN */
7775 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7776 goto illegal_op;
7777 if (s->cpl != 0) {
7778 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7779 break;
7780 } else {
7781 tcg_gen_helper_0_2(helper_vmrun,
7782 tcg_const_i32(s->aflag),
7783 tcg_const_i32(s->pc - pc_start));
7784 tcg_gen_exit_tb(0);
7785 s->is_jmp = 3;
7786 }
7787 break;
7788 case 1: /* VMMCALL */
7789 if (!(s->flags & HF_SVME_MASK))
7790 goto illegal_op;
7791 tcg_gen_helper_0_0(helper_vmmcall);
7792 break;
7793 case 2: /* VMLOAD */
7794 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7795 goto illegal_op;
7796 if (s->cpl != 0) {
7797 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7798 break;
7799 } else {
7800 tcg_gen_helper_0_1(helper_vmload,
7801 tcg_const_i32(s->aflag));
7802 }
7803 break;
7804 case 3: /* VMSAVE */
7805 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7806 goto illegal_op;
7807 if (s->cpl != 0) {
7808 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7809 break;
7810 } else {
7811 tcg_gen_helper_0_1(helper_vmsave,
7812 tcg_const_i32(s->aflag));
7813 }
7814 break;
7815 case 4: /* STGI */
7816 if ((!(s->flags & HF_SVME_MASK) &&
7817 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7818 !s->pe)
7819 goto illegal_op;
7820 if (s->cpl != 0) {
7821 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7822 break;
7823 } else {
7824 tcg_gen_helper_0_0(helper_stgi);
7825 }
7826 break;
7827 case 5: /* CLGI */
7828 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7829 goto illegal_op;
7830 if (s->cpl != 0) {
7831 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7832 break;
7833 } else {
7834 tcg_gen_helper_0_0(helper_clgi);
7835 }
7836 break;
7837 case 6: /* SKINIT */
7838 if ((!(s->flags & HF_SVME_MASK) &&
7839 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7840 !s->pe)
7841 goto illegal_op;
7842 tcg_gen_helper_0_0(helper_skinit);
7843 break;
7844 case 7: /* INVLPGA */
7845 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7846 goto illegal_op;
7847 if (s->cpl != 0) {
7848 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7849 break;
7850 } else {
7851 tcg_gen_helper_0_1(helper_invlpga,
7852 tcg_const_i32(s->aflag));
7853 }
7854 break;
7855 default:
7856 goto illegal_op;
7857 }
7858 } else if (s->cpl != 0) {
7859 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7860 } else {
7861 gen_svm_check_intercept(s, pc_start,
7862 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7864 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7865 gen_add_A0_im(s, 2);
7866 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7867 if (!s->dflag)
7868 gen_op_andl_T0_im(0xffffff);
7869 if (op == 2) {
7870 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7871 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7872 } else {
7873 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7874 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7875 }
7876 }
7877 break;
7878 case 4: /* smsw */
7879 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7880 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7881 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7882 break;
7883 case 6: /* lmsw */
7884 if (s->cpl != 0) {
7885 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7886 } else {
7887 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7888 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7889 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7890 gen_jmp_im(s->pc - s->cs_base);
7891 gen_eob(s);
7892 }
7893 break;
7894 case 7: /* invlpg */
7895 if (s->cpl != 0) {
7896 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7897 } else {
7898 if (mod == 3) {
7899#ifdef TARGET_X86_64
7900 if (CODE64(s) && rm == 0) {
7901 /* swapgs */
7902 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7903 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7904 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7905 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7906 } else
7907#endif
7908 {
7909 goto illegal_op;
7910 }
7911 } else {
7912 if (s->cc_op != CC_OP_DYNAMIC)
7913 gen_op_set_cc_op(s->cc_op);
7914 gen_jmp_im(pc_start - s->cs_base);
7915 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7916 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7917 gen_jmp_im(s->pc - s->cs_base);
7918 gen_eob(s);
7919 }
7920 }
7921 break;
7922 default:
7923 goto illegal_op;
7924 }
7925 break;
7926 case 0x108: /* invd */
7927 case 0x109: /* wbinvd */
7928 if (s->cpl != 0) {
7929 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7930 } else {
7931 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7932 /* nothing to do */
7933 }
7934 break;
7935 case 0x63: /* arpl or movslS (x86_64) */
7936#ifdef TARGET_X86_64
7937 if (CODE64(s)) {
7938 int d_ot;
7939 /* d_ot is the size of destination */
7940 d_ot = dflag + OT_WORD;
7941
7942 modrm = ldub_code(s->pc++);
7943 reg = ((modrm >> 3) & 7) | rex_r;
7944 mod = (modrm >> 6) & 3;
7945 rm = (modrm & 7) | REX_B(s);
7946
7947 if (mod == 3) {
7948 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7949 /* sign extend */
7950 if (d_ot == OT_QUAD)
7951 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7952 gen_op_mov_reg_T0(d_ot, reg);
7953 } else {
7954 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7955 if (d_ot == OT_QUAD) {
7956 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7957 } else {
7958 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7959 }
7960 gen_op_mov_reg_T0(d_ot, reg);
7961 }
7962 } else
7963#endif
7964 {
7965 int label1;
7966 TCGv t0, t1, t2, a0;
7967
7968 if (!s->pe || s->vm86)
7969 goto illegal_op;
7970
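/* arpl dst, src: if the RPL field (bits 1:0) of dst is lower than that of
   src, raise it to src's RPL and set ZF, otherwise clear ZF */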
7971 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7972 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7973 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7974#ifdef VBOX
7975 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7976#endif
7977 ot = OT_WORD;
7978 modrm = ldub_code(s->pc++);
7979 reg = (modrm >> 3) & 7;
7980 mod = (modrm >> 6) & 3;
7981 rm = modrm & 7;
7982 if (mod != 3) {
7983 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7984#ifdef VBOX
7985 tcg_gen_mov_tl(a0, cpu_A0);
7986#endif
7987 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7988 } else {
7989 gen_op_mov_v_reg(ot, t0, rm);
7990 }
7991 gen_op_mov_v_reg(ot, t1, reg);
7992 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7993 tcg_gen_andi_tl(t1, t1, 3);
7994 tcg_gen_movi_tl(t2, 0);
7995 label1 = gen_new_label();
7996 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7997 tcg_gen_andi_tl(t0, t0, ~3);
7998 tcg_gen_or_tl(t0, t0, t1);
7999 tcg_gen_movi_tl(t2, CC_Z);
8000 gen_set_label(label1);
8001 if (mod != 3) {
8002#ifdef VBOX
8003 /* cpu_A0 doesn't survive branch */
8004 gen_op_st_v(ot + s->mem_index, t0, a0);
8005#else
8006 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
8007#endif
8008 } else {
8009 gen_op_mov_reg_v(ot, rm, t0);
8010 }
8011 if (s->cc_op != CC_OP_DYNAMIC)
8012 gen_op_set_cc_op(s->cc_op);
8013 gen_compute_eflags(cpu_cc_src);
8014 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
8015 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
8016 s->cc_op = CC_OP_EFLAGS;
8017 tcg_temp_free(t0);
8018 tcg_temp_free(t1);
8019 tcg_temp_free(t2);
8020#ifdef VBOX
8021 tcg_temp_free(a0);
8022#endif
8023 }
8024 break;
8025 case 0x102: /* lar */
8026 case 0x103: /* lsl */
8027 {
8028 int label1;
8029 TCGv t0;
8030 if (!s->pe || s->vm86)
8031 goto illegal_op;
8032 ot = dflag ? OT_LONG : OT_WORD;
8033 modrm = ldub_code(s->pc++);
8034 reg = ((modrm >> 3) & 7) | rex_r;
8035 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8036 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8037 if (s->cc_op != CC_OP_DYNAMIC)
8038 gen_op_set_cc_op(s->cc_op);
8039 if (b == 0x102)
8040 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8041 else
8042 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8043 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
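/* the helpers report success through ZF in cc_src: the destination is
   written only when ZF is set (valid, accessible selector) */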
8044 label1 = gen_new_label();
8045 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8046 gen_op_mov_reg_v(ot, reg, t0);
8047 gen_set_label(label1);
8048 s->cc_op = CC_OP_EFLAGS;
8049 tcg_temp_free(t0);
8050 }
8051 break;
8052 case 0x118:
8053 modrm = ldub_code(s->pc++);
8054 mod = (modrm >> 6) & 3;
8055 op = (modrm >> 3) & 7;
8056 switch(op) {
8057 case 0: /* prefetchnta */
8058        case 1: /* prefetcht0 */
8059        case 2: /* prefetcht1 */
8060        case 3: /* prefetcht2 */
8061 if (mod == 3)
8062 goto illegal_op;
8063 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8064 /* nothing more to do */
8065 break;
8066 default: /* nop (multi byte) */
8067 gen_nop_modrm(s, modrm);
8068 break;
8069 }
8070 break;
8071 case 0x119 ... 0x11f: /* nop (multi byte) */
8072 modrm = ldub_code(s->pc++);
8073 gen_nop_modrm(s, modrm);
8074 break;
8075 case 0x120: /* mov reg, crN */
8076 case 0x122: /* mov crN, reg */
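        /* Control-register moves are privileged (CPL 0 only). Only
         * CR0/2/3/4/8 are decoded below; anything else is #UD. Writes
         * end the translation block, since they may change state the
         * translator depends on (e.g. paging or protected mode). */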
8077 if (s->cpl != 0) {
8078 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8079 } else {
8080 modrm = ldub_code(s->pc++);
8081 if ((modrm & 0xc0) != 0xc0)
8082 goto illegal_op;
8083 rm = (modrm & 7) | REX_B(s);
8084 reg = ((modrm >> 3) & 7) | rex_r;
8085 if (CODE64(s))
8086 ot = OT_QUAD;
8087 else
8088 ot = OT_LONG;
8089 switch(reg) {
8090 case 0:
8091 case 2:
8092 case 3:
8093 case 4:
8094 case 8:
8095 if (s->cc_op != CC_OP_DYNAMIC)
8096 gen_op_set_cc_op(s->cc_op);
8097 gen_jmp_im(pc_start - s->cs_base);
8098 if (b & 2) {
8099 gen_op_mov_TN_reg(ot, 0, rm);
8100 tcg_gen_helper_0_2(helper_write_crN,
8101 tcg_const_i32(reg), cpu_T[0]);
8102 gen_jmp_im(s->pc - s->cs_base);
8103 gen_eob(s);
8104 } else {
8105 tcg_gen_helper_1_1(helper_read_crN,
8106 cpu_T[0], tcg_const_i32(reg));
8107 gen_op_mov_reg_T0(ot, rm);
8108 }
8109 break;
8110 default:
8111 goto illegal_op;
8112 }
8113 }
8114 break;
8115 case 0x121: /* mov reg, drN */
8116 case 0x123: /* mov drN, reg */
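        /* Debug-register moves are likewise privileged. DR4/DR5 and
         * DR8+ are rejected below; DR4/5 would alias DR6/7 only when
         * CR4.DE is clear, which is not modelled dynamically here (see
         * the XXX note). Writes go through a helper and end the block;
         * reads load env->dr[reg] directly. */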
8117 if (s->cpl != 0) {
8118 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8119 } else {
8120 modrm = ldub_code(s->pc++);
8121 if ((modrm & 0xc0) != 0xc0)
8122 goto illegal_op;
8123 rm = (modrm & 7) | REX_B(s);
8124 reg = ((modrm >> 3) & 7) | rex_r;
8125 if (CODE64(s))
8126 ot = OT_QUAD;
8127 else
8128 ot = OT_LONG;
8129 /* XXX: do it dynamically with CR4.DE bit */
8130 if (reg == 4 || reg == 5 || reg >= 8)
8131 goto illegal_op;
8132 if (b & 2) {
8133 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8134 gen_op_mov_TN_reg(ot, 0, rm);
8135 tcg_gen_helper_0_2(helper_movl_drN_T0,
8136 tcg_const_i32(reg), cpu_T[0]);
8137 gen_jmp_im(s->pc - s->cs_base);
8138 gen_eob(s);
8139 } else {
8140 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8141 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8142 gen_op_mov_reg_T0(ot, rm);
8143 }
8144 }
8145 break;
8146 case 0x106: /* clts */
8147 if (s->cpl != 0) {
8148 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8149 } else {
8150 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8151 tcg_gen_helper_0_0(helper_clts);
8152            /* abort the block because static CPU state changed */
8153 gen_jmp_im(s->pc - s->cs_base);
8154 gen_eob(s);
8155 }
8156 break;
8157 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8158 case 0x1c3: /* MOVNTI reg, mem */
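    /* movnti is a non-temporal (cache-bypassing) store hint; since the
     * recompiler has no cache model, it is emulated as a plain store of
     * the selected operand size. */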
8159 if (!(s->cpuid_features & CPUID_SSE2))
8160 goto illegal_op;
8161 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8162 modrm = ldub_code(s->pc++);
8163 mod = (modrm >> 6) & 3;
8164 if (mod == 3)
8165 goto illegal_op;
8166 reg = ((modrm >> 3) & 7) | rex_r;
8167 /* generate a generic store */
8168 gen_ldst_modrm(s, modrm, ot, reg, 1);
8169 break;
8170 case 0x1ae:
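    /* 0F AE is a group opcode: the reg/op field of the modrm byte
     * selects fxsave (0), fxrstor (1), ldmxcsr (2), stmxcsr (3),
     * lfence (5), mfence (6) or sfence/clflush (7, distinguished by
     * the mod field); anything else is illegal. */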
8171 modrm = ldub_code(s->pc++);
8172 mod = (modrm >> 6) & 3;
8173 op = (modrm >> 3) & 7;
8174 switch(op) {
8175 case 0: /* fxsave */
8176 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8177 (s->flags & HF_EM_MASK))
8178 goto illegal_op;
8179 if (s->flags & HF_TS_MASK) {
8180 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8181 break;
8182 }
8183 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8184 if (s->cc_op != CC_OP_DYNAMIC)
8185 gen_op_set_cc_op(s->cc_op);
8186 gen_jmp_im(pc_start - s->cs_base);
8187 tcg_gen_helper_0_2(helper_fxsave,
8188 cpu_A0, tcg_const_i32((s->dflag == 2)));
8189 break;
8190 case 1: /* fxrstor */
8191 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8192 (s->flags & HF_EM_MASK))
8193 goto illegal_op;
8194 if (s->flags & HF_TS_MASK) {
8195 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8196 break;
8197 }
8198 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8199 if (s->cc_op != CC_OP_DYNAMIC)
8200 gen_op_set_cc_op(s->cc_op);
8201 gen_jmp_im(pc_start - s->cs_base);
8202 tcg_gen_helper_0_2(helper_fxrstor,
8203 cpu_A0, tcg_const_i32((s->dflag == 2)));
8204 break;
8205 case 2: /* ldmxcsr */
8206 case 3: /* stmxcsr */
8207 if (s->flags & HF_TS_MASK) {
8208 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8209 break;
8210 }
8211 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8212 mod == 3)
8213 goto illegal_op;
8214 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8215 if (op == 2) {
8216 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8217 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8218 } else {
8219 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8220 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8221 }
8222 break;
8223 case 5: /* lfence */
8224 case 6: /* mfence */
8225 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8226 goto illegal_op;
8227 break;
8228 case 7: /* sfence / clflush */
8229 if ((modrm & 0xc7) == 0xc0) {
8230 /* sfence */
8231 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8232 if (!(s->cpuid_features & CPUID_SSE))
8233 goto illegal_op;
8234 } else {
8235 /* clflush */
8236 if (!(s->cpuid_features & CPUID_CLFLUSH))
8237 goto illegal_op;
8238 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8239 }
8240 break;
8241 default:
8242 goto illegal_op;
8243 }
8244 break;
8245 case 0x10d: /* 3DNow! prefetch(w) */
8246 modrm = ldub_code(s->pc++);
8247 mod = (modrm >> 6) & 3;
8248 if (mod == 3)
8249 goto illegal_op;
8250 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8251 /* ignore for now */
8252 break;
8253 case 0x1aa: /* rsm */
8254 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8255 if (!(s->flags & HF_SMM_MASK))
8256 goto illegal_op;
8257 if (s->cc_op != CC_OP_DYNAMIC) {
8258 gen_op_set_cc_op(s->cc_op);
8259 s->cc_op = CC_OP_DYNAMIC;
8260 }
8261 gen_jmp_im(s->pc - s->cs_base);
8262 tcg_gen_helper_0_0(helper_rsm);
8263 gen_eob(s);
8264 break;
8265 case 0x1b8: /* SSE4.2 popcnt */
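    /* popcnt is encoded as F3 0F B8, so REPZ must be present (as the
     * mandatory prefix) and LOCK/REPNZ must not be. Semantics sketch:
     * dst = number of set bits in r/m; the helper also computes the
     * resulting eflags (ZF set iff the source is zero), hence the
     * CC_OP_EFLAGS below. */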
8266 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8267 PREFIX_REPZ)
8268 goto illegal_op;
8269 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8270 goto illegal_op;
8271
8272 modrm = ldub_code(s->pc++);
8273 reg = ((modrm >> 3) & 7);
8274
8275 if (s->prefix & PREFIX_DATA)
8276 ot = OT_WORD;
8277 else if (s->dflag != 2)
8278 ot = OT_LONG;
8279 else
8280 ot = OT_QUAD;
8281
8282 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8283 tcg_gen_helper_1_2(helper_popcnt,
8284 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8285 gen_op_mov_reg_T0(ot, reg);
8286
8287 s->cc_op = CC_OP_EFLAGS;
8288 break;
8289 case 0x10e ... 0x10f:
8290 /* 3DNow! instructions, ignore prefixes */
8291        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA); /* fall through */
8292 case 0x110 ... 0x117:
8293 case 0x128 ... 0x12f:
8294 case 0x138 ... 0x13a:
8295 case 0x150 ... 0x177:
8296 case 0x17c ... 0x17f:
8297 case 0x1c2:
8298 case 0x1c4 ... 0x1c6:
8299 case 0x1d0 ... 0x1fe:
8300 gen_sse(s, b, pc_start, rex_r);
8301 break;
8302 default:
8303 goto illegal_op;
8304 }
8305 /* lock generation */
8306 if (s->prefix & PREFIX_LOCK)
8307 tcg_gen_helper_0_0(helper_unlock);
8308 return s->pc;
8309 illegal_op:
8310 if (s->prefix & PREFIX_LOCK)
8311 tcg_gen_helper_0_0(helper_unlock);
8312 /* XXX: ensure that no lock was generated */
8313 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8314 return s->pc;
8315}
8316
8317void optimize_flags_init(void)
8318{
8319#if TCG_TARGET_REG_BITS == 32
8320 assert(sizeof(CCTable) == (1 << 3));
8321#else
8322 assert(sizeof(CCTable) == (1 << 4));
8323#endif
8324 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8325 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8326 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8327 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8328 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8329 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8330 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8331 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8332 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8333
8334 /* register helpers */
8335
8336#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8337#include "helper.h"
8338}
8339
8340/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8341 basic block 'tb'. If search_pc is TRUE, also generate PC
8342 information for each intermediate instruction. */
8343#ifndef VBOX
8344static inline void gen_intermediate_code_internal(CPUState *env,
8345#else /* VBOX */
8346DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8347#endif /* VBOX */
8348 TranslationBlock *tb,
8349 int search_pc)
8350{
8351 DisasContext dc1, *dc = &dc1;
8352 target_ulong pc_ptr;
8353 uint16_t *gen_opc_end;
8354 int j, lj, cflags;
8355 uint64_t flags;
8356 target_ulong pc_start;
8357 target_ulong cs_base;
8358 int num_insns;
8359 int max_insns;
8360
8361 /* generate intermediate code */
8362 pc_start = tb->pc;
8363 cs_base = tb->cs_base;
8364 flags = tb->flags;
8365 cflags = tb->cflags;
8366
8367 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8368 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8369 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8370 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8371 dc->f_st = 0;
8372 dc->vm86 = (flags >> VM_SHIFT) & 1;
8373#ifdef VBOX
8374 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8375 dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
8376#ifdef VBOX_WITH_CALL_RECORD
8377 if ( !(env->state & CPU_RAW_RING0)
8378 && (env->cr[0] & CR0_PG_MASK)
8379 && !(env->eflags & X86_EFL_IF)
8380 && dc->code32)
8381 dc->record_call = 1;
8382 else
8383 dc->record_call = 0;
8384#endif
8385#endif
8386 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8387 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8388 dc->tf = (flags >> TF_SHIFT) & 1;
8389 dc->singlestep_enabled = env->singlestep_enabled;
8390 dc->cc_op = CC_OP_DYNAMIC;
8391 dc->cs_base = cs_base;
8392 dc->tb = tb;
8393 dc->popl_esp_hack = 0;
8394 /* select memory access functions */
8395 dc->mem_index = 0;
8396 if (flags & HF_SOFTMMU_MASK) {
8397 if (dc->cpl == 3)
8398 dc->mem_index = 2 * 4;
8399 else
8400 dc->mem_index = 1 * 4;
8401 }
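    /* mem_index selects the softmmu access functions: index 0 means raw
     * (no softmmu) accesses, otherwise kernel (CPL != 3) and user
     * (CPL == 3) accesses use separate indexes, roughly scaled by 4
     * because there are four operand-size variants per mode. */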
8402 dc->cpuid_features = env->cpuid_features;
8403 dc->cpuid_ext_features = env->cpuid_ext_features;
8404 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8405 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8406#ifdef TARGET_X86_64
8407 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8408 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8409#endif
8410 dc->flags = flags;
8411 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8412 (flags & HF_INHIBIT_IRQ_MASK)
8413#ifndef CONFIG_SOFTMMU
8414 || (flags & HF_SOFTMMU_MASK)
8415#endif
8416 );
8417#if 0
8418 /* check addseg logic */
8419 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8420 printf("ERROR addseg\n");
8421#endif
8422
8423 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8424 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8425 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8426 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8427
8428 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8429 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8430 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8431 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8432 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8433 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8434 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8435 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8436 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8437
8438 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8439
8440 dc->is_jmp = DISAS_NEXT;
8441 pc_ptr = pc_start;
8442 lj = -1;
8443 num_insns = 0;
8444 max_insns = tb->cflags & CF_COUNT_MASK;
8445 if (max_insns == 0)
8446 max_insns = CF_COUNT_MASK;
8447
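    /* Translation loop: each iteration checks breakpoints, records
     * search_pc bookkeeping if requested, and translates one
     * instruction. The loop ends when the instruction itself ended the
     * block (dc->is_jmp), when VBOX single-instruction emulation is
     * requested, when single-step or inhibited IRQs force an early
     * exit, or when the opcode buffer, the page boundary or the
     * max_insns limit is reached. */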
8448 gen_icount_start();
8449 for(;;) {
8450 if (env->nb_breakpoints > 0) {
8451 for(j = 0; j < env->nb_breakpoints; j++) {
8452 if (env->breakpoints[j] == pc_ptr) {
8453 gen_debug(dc, pc_ptr - dc->cs_base);
8454 break;
8455 }
8456 }
8457 }
8458 if (search_pc) {
8459 j = gen_opc_ptr - gen_opc_buf;
8460 if (lj < j) {
8461 lj++;
8462 while (lj < j)
8463 gen_opc_instr_start[lj++] = 0;
8464 }
8465 gen_opc_pc[lj] = pc_ptr;
8466 gen_opc_cc_op[lj] = dc->cc_op;
8467 gen_opc_instr_start[lj] = 1;
8468 gen_opc_icount[lj] = num_insns;
8469 }
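        /* The gen_opc_pc/gen_opc_cc_op/gen_opc_instr_start arrays
         * filled above let gen_pc_load() map a searched PC position
         * back to the guest eip and cc_op after a fault (see below). */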
8470 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8471 gen_io_start();
8472
8473 pc_ptr = disas_insn(dc, pc_ptr);
8474 num_insns++;
8475 /* stop translation if indicated */
8476 if (dc->is_jmp)
8477 break;
8478#ifdef VBOX
8479#ifdef DEBUG
8480/*
8481 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8482 {
8483 //should never happen as the jump to the patch code terminates the translation block
8484 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8485 }
8486*/
8487#endif
8488 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8489 {
8490 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8491 gen_jmp_im(pc_ptr - dc->cs_base);
8492 gen_eob(dc);
8493 break;
8494 }
8495#endif /* VBOX */
8496
8497        /* in single-step mode, we generate only one instruction and
8498           then generate an exception */
8499        /* if IRQs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8500           the flag and abort the translation to give the IRQs a
8501           chance to happen */
8502 if (dc->tf || dc->singlestep_enabled ||
8503 (flags & HF_INHIBIT_IRQ_MASK)) {
8504 gen_jmp_im(pc_ptr - dc->cs_base);
8505 gen_eob(dc);
8506 break;
8507 }
8508        /* if the translation gets too long, stop generation as well */
8509 if (gen_opc_ptr >= gen_opc_end ||
8510 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8511 num_insns >= max_insns) {
8512 gen_jmp_im(pc_ptr - dc->cs_base);
8513 gen_eob(dc);
8514 break;
8515 }
8516 }
8517 if (tb->cflags & CF_LAST_IO)
8518 gen_io_end();
8519 gen_icount_end(tb, num_insns);
8520 *gen_opc_ptr = INDEX_op_end;
8521    /* don't forget to fill the last values */
8522 if (search_pc) {
8523 j = gen_opc_ptr - gen_opc_buf;
8524 lj++;
8525 while (lj <= j)
8526 gen_opc_instr_start[lj++] = 0;
8527 }
8528
8529#ifdef DEBUG_DISAS
8530 if (loglevel & CPU_LOG_TB_CPU) {
8531 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8532 }
8533 if (loglevel & CPU_LOG_TB_IN_ASM) {
8534 int disas_flags;
8535 fprintf(logfile, "----------------\n");
8536 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8537#ifdef TARGET_X86_64
8538 if (dc->code64)
8539 disas_flags = 2;
8540 else
8541#endif
8542 disas_flags = !dc->code32;
8543 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8544 fprintf(logfile, "\n");
8545 }
8546#endif
8547
8548 if (!search_pc) {
8549 tb->size = pc_ptr - pc_start;
8550 tb->icount = num_insns;
8551 }
8552}
8553
8554void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8555{
8556 gen_intermediate_code_internal(env, tb, 0);
8557}
8558
8559void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8560{
8561 gen_intermediate_code_internal(env, tb, 1);
8562}
8563
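/* Map a searched PC position back to guest state: restore env->eip (and
   env->cc_op, when it was static at that point) from the arrays recorded
   by a search_pc translation pass. */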
8564void gen_pc_load(CPUState *env, TranslationBlock *tb,
8565 unsigned long searched_pc, int pc_pos, void *puc)
8566{
8567 int cc_op;
8568#ifdef DEBUG_DISAS
8569 if (loglevel & CPU_LOG_TB_OP) {
8570 int i;
8571 fprintf(logfile, "RESTORE:\n");
8572 for(i = 0;i <= pc_pos; i++) {
8573 if (gen_opc_instr_start[i]) {
8574 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8575 }
8576 }
8577 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8578 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8579 (uint32_t)tb->cs_base);
8580 }
8581#endif
8582 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8583 cc_op = gen_opc_cc_op[pc_pos];
8584 if (cc_op != CC_OP_DYNAMIC)
8585 env->cc_op = cc_op;
8586}