VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c @ 33656

Last change: r33656, checked in by vboxsync ("*: rebrand Sun (L)GPL disclaimers")
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Oracle LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Oracle elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29
30#include <stdarg.h>
31#include <stdlib.h>
32#include <stdio.h>
33#include <string.h>
34#ifndef VBOX
35#include <inttypes.h>
36#include <signal.h>
37#include <assert.h>
38#endif /* !VBOX */
39
40#include "cpu.h"
41#include "exec-all.h"
42#include "disas.h"
43#include "helper.h"
44#include "tcg-op.h"
45
46#define PREFIX_REPZ 0x01
47#define PREFIX_REPNZ 0x02
48#define PREFIX_LOCK 0x04
49#define PREFIX_DATA 0x08
50#define PREFIX_ADR 0x10
51
52#ifdef TARGET_X86_64
53#define X86_64_ONLY(x) x
54#ifndef VBOX
55#define X86_64_DEF(x...) x
56#else
57#define X86_64_DEF(x...) x
58#endif
59#define CODE64(s) ((s)->code64)
60#define REX_X(s) ((s)->rex_x)
61#define REX_B(s) ((s)->rex_b)
62/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
63#if 1
64#define BUGGY_64(x) NULL
65#endif
66#else
67#define X86_64_ONLY(x) NULL
68#ifndef VBOX
69#define X86_64_DEF(x...)
70#else
71#define X86_64_DEF(x)
72#endif
73#define CODE64(s) 0
74#define REX_X(s) 0
75#define REX_B(s) 0
76#endif
77
78//#define MACRO_TEST 1
79
80/* global register indexes */
81static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
82/* local temps */
83static TCGv cpu_T[2], cpu_T3;
84/* local register indexes (only used inside old micro ops) */
85static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
86static TCGv cpu_tmp5, cpu_tmp6;
87
88#include "gen-icount.h"
89
90#ifdef TARGET_X86_64
91static int x86_64_hregs;
92#endif
93
94#ifdef VBOX
95
96/* Special/override code readers to hide patched code. */
97
98uint8_t ldub_code_raw(target_ulong pc)
99{
100 uint8_t b;
101
102 if (!remR3GetOpcode(cpu_single_env, pc, &b))
103 b = ldub_code(pc);
104 return b;
105}
106#define ldub_code(a) ldub_code_raw(a)
107
108uint16_t lduw_code_raw(target_ulong pc)
109{
110 return (ldub_code(pc+1) << 8) | ldub_code(pc);
111}
112#define lduw_code(a) lduw_code_raw(a)
113
114
115uint32_t ldl_code_raw(target_ulong pc)
116{
117 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
118}
119#define ldl_code(a) ldl_code_raw(a)
120
121#endif /* VBOX */
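#if 0 /* Illustrative sketch, not part of the original source: the raw
         readers above rebuild multi-byte values little-endian from
         byte-wise reads, so remR3GetOpcode() can substitute the original
         byte for every patched one. A hypothetical 64-bit reader would
         follow the same pattern: */
uint64_t ldq_code_raw(target_ulong pc)
{
    return ((uint64_t)ldl_code(pc + 4) << 32) | ldl_code(pc);
}
#define ldq_code(a) ldq_code_raw(a)
#endif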
122
123
124typedef struct DisasContext {
125 /* current insn context */
126 int override; /* -1 if no override */
127 int prefix;
128 int aflag, dflag;
129 target_ulong pc; /* pc = eip + cs_base */
130 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
131 static state change (stop translation) */
132 /* current block context */
133 target_ulong cs_base; /* base of CS segment */
134 int pe; /* protected mode */
135 int code32; /* 32 bit code segment */
136#ifdef TARGET_X86_64
137 int lma; /* long mode active */
138 int code64; /* 64 bit code segment */
139 int rex_x, rex_b;
140#endif
141 int ss32; /* 32 bit stack segment */
142 int cc_op; /* current CC operation */
143 int addseg; /* non zero if any of DS/ES/SS has a non zero base */
144 int f_st; /* currently unused */
145 int vm86; /* vm86 mode */
146#ifdef VBOX
147 int vme; /* CR4.VME */
148 int pvi; /* CR4.PVI */
149 int record_call; /* record calls for CSAM or not? */
150#endif
151 int cpl;
152 int iopl;
153 int tf; /* TF cpu flag */
154 int singlestep_enabled; /* "hardware" single step enabled */
155 int jmp_opt; /* use direct block chaining for direct jumps */
156 int mem_index; /* select memory access functions */
157 uint64_t flags; /* all execution flags */
158 struct TranslationBlock *tb;
159 int popl_esp_hack; /* for correct popl with esp base handling */
160 int rip_offset; /* only used in x86_64, but left for simplicity */
161 int cpuid_features;
162 int cpuid_ext_features;
163 int cpuid_ext2_features;
164 int cpuid_ext3_features;
165} DisasContext;
166
167static void gen_eob(DisasContext *s);
168static void gen_jmp(DisasContext *s, target_ulong eip);
169static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
170
171#ifdef VBOX
172static void gen_check_external_event(void);
173#endif
174
175/* i386 arith/logic operations */
176enum {
177 OP_ADDL,
178 OP_ORL,
179 OP_ADCL,
180 OP_SBBL,
181 OP_ANDL,
182 OP_SUBL,
183 OP_XORL,
184 OP_CMPL,
185};
186
187/* i386 shift ops */
188enum {
189 OP_ROL,
190 OP_ROR,
191 OP_RCL,
192 OP_RCR,
193 OP_SHL,
194 OP_SHR,
195 OP_SHL1, /* undocumented */
196 OP_SAR = 7,
197};
198
199enum {
200 JCC_O,
201 JCC_B,
202 JCC_Z,
203 JCC_BE,
204 JCC_S,
205 JCC_P,
206 JCC_L,
207 JCC_LE,
208};
209
210/* operand size */
211enum {
212 OT_BYTE = 0,
213 OT_WORD,
214 OT_LONG,
215 OT_QUAD,
216};
217
218enum {
219 /* I386 int registers */
220 OR_EAX, /* MUST be even numbered */
221 OR_ECX,
222 OR_EDX,
223 OR_EBX,
224 OR_ESP,
225 OR_EBP,
226 OR_ESI,
227 OR_EDI,
228
229 OR_TMP0 = 16, /* temporary operand register */
230 OR_TMP1,
231 OR_A0, /* temporary register used when doing address evaluation */
232};
233
234#ifndef VBOX
235static inline void gen_op_movl_T0_0(void)
236#else /* VBOX */
237DECLINLINE(void) gen_op_movl_T0_0(void)
238#endif /* VBOX */
239{
240 tcg_gen_movi_tl(cpu_T[0], 0);
241}
242
243#ifndef VBOX
244static inline void gen_op_movl_T0_im(int32_t val)
245#else /* VBOX */
246DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
247#endif /* VBOX */
248{
249 tcg_gen_movi_tl(cpu_T[0], val);
250}
251
252#ifndef VBOX
253static inline void gen_op_movl_T0_imu(uint32_t val)
254#else /* VBOX */
255DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
256#endif /* VBOX */
257{
258 tcg_gen_movi_tl(cpu_T[0], val);
259}
260
261#ifndef VBOX
262static inline void gen_op_movl_T1_im(int32_t val)
263#else /* VBOX */
264DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
265#endif /* VBOX */
266{
267 tcg_gen_movi_tl(cpu_T[1], val);
268}
269
270#ifndef VBOX
271static inline void gen_op_movl_T1_imu(uint32_t val)
272#else /* VBOX */
273DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
274#endif /* VBOX */
275{
276 tcg_gen_movi_tl(cpu_T[1], val);
277}
278
279#ifndef VBOX
280static inline void gen_op_movl_A0_im(uint32_t val)
281#else /* VBOX */
282DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
283#endif /* VBOX */
284{
285 tcg_gen_movi_tl(cpu_A0, val);
286}
287
288#ifdef TARGET_X86_64
289#ifndef VBOX
290static inline void gen_op_movq_A0_im(int64_t val)
291#else /* VBOX */
292DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
293#endif /* VBOX */
294{
295 tcg_gen_movi_tl(cpu_A0, val);
296}
297#endif
298
299#ifndef VBOX
300static inline void gen_movtl_T0_im(target_ulong val)
301#else /* VBOX */
302DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
303#endif /* VBOX */
304{
305 tcg_gen_movi_tl(cpu_T[0], val);
306}
307
308#ifndef VBOX
309static inline void gen_movtl_T1_im(target_ulong val)
310#else /* VBOX */
311DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
312#endif /* VBOX */
313{
314 tcg_gen_movi_tl(cpu_T[1], val);
315}
316
317#ifndef VBOX
318static inline void gen_op_andl_T0_ffff(void)
319#else /* VBOX */
320DECLINLINE(void) gen_op_andl_T0_ffff(void)
321#endif /* VBOX */
322{
323 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
324}
325
326#ifndef VBOX
327static inline void gen_op_andl_T0_im(uint32_t val)
328#else /* VBOX */
329DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
330#endif /* VBOX */
331{
332 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
333}
334
335#ifndef VBOX
336static inline void gen_op_movl_T0_T1(void)
337#else /* VBOX */
338DECLINLINE(void) gen_op_movl_T0_T1(void)
339#endif /* VBOX */
340{
341 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
342}
343
344#ifndef VBOX
345static inline void gen_op_andl_A0_ffff(void)
346#else /* VBOX */
347DECLINLINE(void) gen_op_andl_A0_ffff(void)
348#endif /* VBOX */
349{
350 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
351}
352
353#ifdef TARGET_X86_64
354
355#define NB_OP_SIZES 4
356
357#else /* !TARGET_X86_64 */
358
359#define NB_OP_SIZES 3
360
361#endif /* !TARGET_X86_64 */
362
363#if defined(WORDS_BIGENDIAN)
364#define REG_B_OFFSET (sizeof(target_ulong) - 1)
365#define REG_H_OFFSET (sizeof(target_ulong) - 2)
366#define REG_W_OFFSET (sizeof(target_ulong) - 2)
367#define REG_L_OFFSET (sizeof(target_ulong) - 4)
368#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
369#else
370#define REG_B_OFFSET 0
371#define REG_H_OFFSET 1
372#define REG_W_OFFSET 0
373#define REG_L_OFFSET 0
374#define REG_LH_OFFSET 4
375#endif
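#if 0 /* Illustrative sketch, not part of the original source: the offsets
         select a sub-register inside the target_ulong register slot. On a
         little-endian host AL lives at offset 0 and AH at offset 1; on a
         big-endian host they sit at the high end of the slot, hence the
         sizeof(target_ulong) - N definitions above. E.g. a store into AH: */
tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[R_EAX]) + REG_H_OFFSET);
#endif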
376
377#ifndef VBOX
378static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
379#else /* VBOX */
380DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
381#endif /* VBOX */
382{
383 switch(ot) {
384 case OT_BYTE:
385 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
386 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
387 } else {
388 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
389 }
390 break;
391 case OT_WORD:
392 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
393 break;
394#ifdef TARGET_X86_64
395 case OT_LONG:
396 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
397 /* high part of register set to zero */
398 tcg_gen_movi_tl(cpu_tmp0, 0);
399 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
400 break;
401 default:
402 case OT_QUAD:
403 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
404 break;
405#else
406 default:
407 case OT_LONG:
408 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
409 break;
410#endif
411 }
412}
413
414#ifndef VBOX
415static inline void gen_op_mov_reg_T0(int ot, int reg)
416#else /* VBOX */
417DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
418#endif /* VBOX */
419{
420 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
421}
422
423#ifndef VBOX
424static inline void gen_op_mov_reg_T1(int ot, int reg)
425#else /* VBOX */
426DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
427#endif /* VBOX */
428{
429 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
430}
431
432#ifndef VBOX
433static inline void gen_op_mov_reg_A0(int size, int reg)
434#else /* VBOX */
435DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
436#endif /* VBOX */
437{
438 switch(size) {
439 case 0:
440 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
441 break;
442#ifdef TARGET_X86_64
443 case 1:
444 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
445 /* high part of register set to zero */
446 tcg_gen_movi_tl(cpu_tmp0, 0);
447 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
448 break;
449 default:
450 case 2:
451 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
452 break;
453#else
454 default:
455 case 1:
456 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
457 break;
458#endif
459 }
460}
461
462#ifndef VBOX
463static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
464#else /* VBOX */
465DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
466#endif /* VBOX */
467{
468 switch(ot) {
469 case OT_BYTE:
470 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
471#ifndef VBOX
472 goto std_case;
473#else
474 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
475#endif
476 } else {
477 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
478 }
479 break;
480 default:
481#ifndef VBOX
482 std_case:
483#endif
484 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
485 break;
486 }
487}
488
489#ifndef VBOX
490static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
491#else /* VBOX */
492DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
493#endif /* VBOX */
494{
495 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
496}
497
498#ifndef VBOX
499static inline void gen_op_movl_A0_reg(int reg)
500#else /* VBOX */
501DECLINLINE(void) gen_op_movl_A0_reg(int reg)
502#endif /* VBOX */
503{
504 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
505}
506
507#ifndef VBOX
508static inline void gen_op_addl_A0_im(int32_t val)
509#else /* VBOX */
510DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
511#endif /* VBOX */
512{
513 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
514#ifdef TARGET_X86_64
515 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
516#endif
517}
518
519#ifdef TARGET_X86_64
520#ifndef VBOX
521static inline void gen_op_addq_A0_im(int64_t val)
522#else /* VBOX */
523DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
524#endif /* VBOX */
525{
526 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
527}
528#endif
529
530static void gen_add_A0_im(DisasContext *s, int val)
531{
532#ifdef TARGET_X86_64
533 if (CODE64(s))
534 gen_op_addq_A0_im(val);
535 else
536#endif
537 gen_op_addl_A0_im(val);
538}
539
540#ifndef VBOX
541static inline void gen_op_addl_T0_T1(void)
542#else /* VBOX */
543DECLINLINE(void) gen_op_addl_T0_T1(void)
544#endif /* VBOX */
545{
546 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
547}
548
549#ifndef VBOX
550static inline void gen_op_jmp_T0(void)
551#else /* VBOX */
552DECLINLINE(void) gen_op_jmp_T0(void)
553#endif /* VBOX */
554{
555 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
556}
557
558#ifndef VBOX
559static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
560#else /* VBOX */
561DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
562#endif /* VBOX */
563{
564 switch(size) {
565 case 0:
566 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
567 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
568 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
569 break;
570 case 1:
571 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
572 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
573#ifdef TARGET_X86_64
574 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
575#endif
576 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
577 break;
578#ifdef TARGET_X86_64
579 case 2:
580 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
581 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
582 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
583 break;
584#endif
585 }
586}
587
588#ifndef VBOX
589static inline void gen_op_add_reg_T0(int size, int reg)
590#else /* VBOX */
591DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
592#endif /* VBOX */
593{
594 switch(size) {
595 case 0:
596 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
597 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
598 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
599 break;
600 case 1:
601 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
602 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
603#ifdef TARGET_X86_64
604 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
605#endif
606 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
607 break;
608#ifdef TARGET_X86_64
609 case 2:
610 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
611 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
612 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
613 break;
614#endif
615 }
616}
617
618#ifndef VBOX
619static inline void gen_op_set_cc_op(int32_t val)
620#else /* VBOX */
621DECLINLINE(void) gen_op_set_cc_op(int32_t val)
622#endif /* VBOX */
623{
624 tcg_gen_movi_i32(cpu_cc_op, val);
625}
626
627#ifndef VBOX
628static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
629#else /* VBOX */
630DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
631#endif /* VBOX */
632{
633 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
634 if (shift != 0)
635 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
636 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
637#ifdef TARGET_X86_64
638 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
639#endif
640}
641#ifdef VBOX
642DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
643{
644 /* It seems segments don't get out of sync; if they in fact do, enable the code below. */
645#ifdef FORCE_SEGMENT_SYNC
646#if 1
647 TCGv t0;
648
649 /* Given the poor quality of the TCG optimizer, it is better to call the helper directly */
650 t0 = tcg_temp_local_new(TCG_TYPE_TL);
651 tcg_gen_movi_tl(t0, reg);
652 tcg_gen_helper_0_1(helper_sync_seg, t0);
653 tcg_temp_free(t0);
654#else
655 /* Our segments could be outdated, so check the newselector field to see whether an update is really needed */
656 int skip_label;
657 TCGv t0, a0;
658
659 /* For segments other than GS this check is a waste of time; also, TCG is unable to cope
660 with this code for data/stack segments, as it expects cpu_T[0] to be live */
661 if (reg != R_GS)
662 return;
663
664 if (keepA0)
665 {
666 /* we need to store old cpu_A0 */
667 a0 = tcg_temp_local_new(TCG_TYPE_TL);
668 tcg_gen_mov_tl(a0, cpu_A0);
669 }
670
671 skip_label = gen_new_label();
672 t0 = tcg_temp_local_new(TCG_TYPE_TL);
673
674 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
675 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
676 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
677 tcg_gen_andi_tl(t0, t0, VM_MASK);
678 tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
679 tcg_gen_movi_tl(t0, reg);
680
681 tcg_gen_helper_0_1(helper_sync_seg, t0);
682
683 tcg_temp_free(t0);
684
685 gen_set_label(skip_label);
686 if (keepA0)
687 {
688 tcg_gen_mov_tl(cpu_A0, a0);
689 tcg_temp_free(a0);
690 }
691#endif /* 0 */
692#endif /* FORCE_SEGMENT_SYNC */
693}
694#endif
695
696#ifndef VBOX
697static inline void gen_op_movl_A0_seg(int reg)
698#else /* VBOX */
699DECLINLINE(void) gen_op_movl_A0_seg(int reg)
700#endif /* VBOX */
701{
702#ifdef VBOX
703 gen_op_seg_check(reg, false);
704#endif
705 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
706}
707
708#ifndef VBOX
709static inline void gen_op_addl_A0_seg(int reg)
710#else /* VBOX */
711DECLINLINE(void) gen_op_addl_A0_seg(int reg)
712#endif /* VBOX */
713{
714#ifdef VBOX
715 gen_op_seg_check(reg, true);
716#endif
717 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
718 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
719#ifdef TARGET_X86_64
720 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
721#endif
722}
723
724#ifdef TARGET_X86_64
725#ifndef VBOX
726static inline void gen_op_movq_A0_seg(int reg)
727#else /* VBOX */
728DECLINLINE(void) gen_op_movq_A0_seg(int reg)
729#endif /* VBOX */
730{
731#ifdef VBOX
732 gen_op_seg_check(reg, false);
733#endif
734 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
735}
736
737#ifndef VBOX
738static inline void gen_op_addq_A0_seg(int reg)
739#else /* VBOX */
740DECLINLINE(void) gen_op_addq_A0_seg(int reg)
741#endif /* VBOX */
742{
743#ifdef VBOX
744 gen_op_seg_check(reg, true);
745#endif
746 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
747 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
748}
749
750#ifndef VBOX
751static inline void gen_op_movq_A0_reg(int reg)
752#else /* VBOX */
753DECLINLINE(void) gen_op_movq_A0_reg(int reg)
754#endif /* VBOX */
755{
756 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
757}
758
759#ifndef VBOX
760static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
761#else /* VBOX */
762DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
763#endif /* VBOX */
764{
765 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
766 if (shift != 0)
767 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
768 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
769}
770#endif
771
772#ifndef VBOX
773static inline void gen_op_lds_T0_A0(int idx)
774#else /* VBOX */
775DECLINLINE(void) gen_op_lds_T0_A0(int idx)
776#endif /* VBOX */
777{
778 int mem_index = (idx >> 2) - 1;
779 switch(idx & 3) {
780 case 0:
781 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
782 break;
783 case 1:
784 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
785 break;
786 default:
787 case 2:
788 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
789 break;
790 }
791}
792
793#ifndef VBOX
794static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
795#else /* VBOX */
796DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
797#endif /* VBOX */
798{
799 int mem_index = (idx >> 2) - 1;
800 switch(idx & 3) {
801 case 0:
802 tcg_gen_qemu_ld8u(t0, a0, mem_index);
803 break;
804 case 1:
805 tcg_gen_qemu_ld16u(t0, a0, mem_index);
806 break;
807 case 2:
808 tcg_gen_qemu_ld32u(t0, a0, mem_index);
809 break;
810 default:
811 case 3:
812 tcg_gen_qemu_ld64(t0, a0, mem_index);
813 break;
814 }
815}
816
817/* XXX: always use ldu or lds */
818#ifndef VBOX
819static inline void gen_op_ld_T0_A0(int idx)
820#else /* VBOX */
821DECLINLINE(void) gen_op_ld_T0_A0(int idx)
822#endif /* VBOX */
823{
824 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
825}
826
827#ifndef VBOX
828static inline void gen_op_ldu_T0_A0(int idx)
829#else /* VBOX */
830DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
831#endif /* VBOX */
832{
833 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
834}
835
836#ifndef VBOX
837static inline void gen_op_ld_T1_A0(int idx)
838#else /* VBOX */
839DECLINLINE(void) gen_op_ld_T1_A0(int idx)
840#endif /* VBOX */
841{
842 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
843}
844
845#ifndef VBOX
846static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
847#else /* VBOX */
848DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
849#endif /* VBOX */
850{
851 int mem_index = (idx >> 2) - 1;
852 switch(idx & 3) {
853 case 0:
854 tcg_gen_qemu_st8(t0, a0, mem_index);
855 break;
856 case 1:
857 tcg_gen_qemu_st16(t0, a0, mem_index);
858 break;
859 case 2:
860 tcg_gen_qemu_st32(t0, a0, mem_index);
861 break;
862 default:
863 case 3:
864 tcg_gen_qemu_st64(t0, a0, mem_index);
865 break;
866 }
867}
868
869#ifndef VBOX
870static inline void gen_op_st_T0_A0(int idx)
871#else /* VBOX */
872DECLINLINE(void) gen_op_st_T0_A0(int idx)
873#endif /* VBOX */
874{
875 gen_op_st_v(idx, cpu_T[0], cpu_A0);
876}
877
878#ifndef VBOX
879static inline void gen_op_st_T1_A0(int idx)
880#else /* VBOX */
881DECLINLINE(void) gen_op_st_T1_A0(int idx)
882#endif /* VBOX */
883{
884 gen_op_st_v(idx, cpu_T[1], cpu_A0);
885}
886
887#ifdef VBOX
888static void gen_check_external_event(void)
889{
890#if 1
891 /** @todo once TCG codegen improves, we may want to use the
892 version from the #else branch below */
893 tcg_gen_helper_0_0(helper_check_external_event);
894#else
895 int skip_label;
896 TCGv t0;
897
898 skip_label = gen_new_label();
899 t0 = tcg_temp_local_new(TCG_TYPE_TL);
900 /* t0 = cpu_tmp0; */
901
902 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
903 /* Keep in sync with helper_check_external_event() */
904 tcg_gen_andi_tl(t0, t0,
905 CPU_INTERRUPT_EXTERNAL_EXIT
906 | CPU_INTERRUPT_EXTERNAL_TIMER
907 | CPU_INTERRUPT_EXTERNAL_DMA
908 | CPU_INTERRUPT_EXTERNAL_HARD);
909 /** @todo: predict branch as taken */
910 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
911 tcg_temp_free(t0);
912
913 tcg_gen_helper_0_0(helper_check_external_event);
914
915 gen_set_label(skip_label);
916#endif
917}
918
919#if 0 /* unused code? */
920static void gen_check_external_event2()
921{
922 tcg_gen_helper_0_0(helper_check_external_event);
923}
924#endif
925
926#endif
927
928#ifndef VBOX
929static inline void gen_jmp_im(target_ulong pc)
930#else /* VBOX */
931DECLINLINE(void) gen_jmp_im(target_ulong pc)
932#endif /* VBOX */
933{
934 tcg_gen_movi_tl(cpu_tmp0, pc);
935 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
936}
937
938#ifdef VBOX
939DECLINLINE(void) gen_update_eip(target_ulong pc)
940{
941 gen_jmp_im(pc);
942#ifdef VBOX_DUMP_STATE
943 tcg_gen_helper_0_0(helper_dump_state);
944#endif
945}
946
947#endif
948
949#ifndef VBOX
950static inline void gen_string_movl_A0_ESI(DisasContext *s)
951#else /* VBOX */
952DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
953#endif /* VBOX */
954{
955 int override;
956
957 override = s->override;
958#ifdef TARGET_X86_64
959 if (s->aflag == 2) {
960 if (override >= 0) {
961 gen_op_movq_A0_seg(override);
962 gen_op_addq_A0_reg_sN(0, R_ESI);
963 } else {
964 gen_op_movq_A0_reg(R_ESI);
965 }
966 } else
967#endif
968 if (s->aflag) {
969 /* 32 bit address */
970 if (s->addseg && override < 0)
971 override = R_DS;
972 if (override >= 0) {
973 gen_op_movl_A0_seg(override);
974 gen_op_addl_A0_reg_sN(0, R_ESI);
975 } else {
976 gen_op_movl_A0_reg(R_ESI);
977 }
978 } else {
979 /* 16 bit address, always override */
980 if (override < 0)
981 override = R_DS;
982 gen_op_movl_A0_reg(R_ESI);
983 gen_op_andl_A0_ffff();
984 gen_op_addl_A0_seg(override);
985 }
986}
987
988#ifndef VBOX
989static inline void gen_string_movl_A0_EDI(DisasContext *s)
990#else /* VBOX */
991DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
992#endif /* VBOX */
993{
994#ifdef TARGET_X86_64
995 if (s->aflag == 2) {
996 gen_op_movq_A0_reg(R_EDI);
997 } else
998#endif
999 if (s->aflag) {
1000 if (s->addseg) {
1001 gen_op_movl_A0_seg(R_ES);
1002 gen_op_addl_A0_reg_sN(0, R_EDI);
1003 } else {
1004 gen_op_movl_A0_reg(R_EDI);
1005 }
1006 } else {
1007 gen_op_movl_A0_reg(R_EDI);
1008 gen_op_andl_A0_ffff();
1009 gen_op_addl_A0_seg(R_ES);
1010 }
1011}
1012
1013#ifndef VBOX
1014static inline void gen_op_movl_T0_Dshift(int ot)
1015#else /* VBOX */
1016DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
1017#endif /* VBOX */
1018{
1019 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
1020 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
1021}
1022
1023static void gen_extu(int ot, TCGv reg)
1024{
1025 switch(ot) {
1026 case OT_BYTE:
1027 tcg_gen_ext8u_tl(reg, reg);
1028 break;
1029 case OT_WORD:
1030 tcg_gen_ext16u_tl(reg, reg);
1031 break;
1032 case OT_LONG:
1033 tcg_gen_ext32u_tl(reg, reg);
1034 break;
1035 default:
1036 break;
1037 }
1038}
1039
1040static void gen_exts(int ot, TCGv reg)
1041{
1042 switch(ot) {
1043 case OT_BYTE:
1044 tcg_gen_ext8s_tl(reg, reg);
1045 break;
1046 case OT_WORD:
1047 tcg_gen_ext16s_tl(reg, reg);
1048 break;
1049 case OT_LONG:
1050 tcg_gen_ext32s_tl(reg, reg);
1051 break;
1052 default:
1053 break;
1054 }
1055}
1056
1057#ifndef VBOX
1058static inline void gen_op_jnz_ecx(int size, int label1)
1059#else /* VBOX */
1060DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
1061#endif /* VBOX */
1062{
1063 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1064 gen_extu(size + 1, cpu_tmp0);
1065 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
1066}
1067
1068#ifndef VBOX
1069static inline void gen_op_jz_ecx(int size, int label1)
1070#else /* VBOX */
1071DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
1072#endif /* VBOX */
1073{
1074 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1075 gen_extu(size + 1, cpu_tmp0);
1076 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1077}
1078
1079static void *helper_in_func[3] = {
1080 helper_inb,
1081 helper_inw,
1082 helper_inl,
1083};
1084
1085static void *helper_out_func[3] = {
1086 helper_outb,
1087 helper_outw,
1088 helper_outl,
1089};
1090
1091static void *gen_check_io_func[3] = {
1092 helper_check_iob,
1093 helper_check_iow,
1094 helper_check_iol,
1095};
1096
1097static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
1098 uint32_t svm_flags)
1099{
1100 int state_saved;
1101 target_ulong next_eip;
1102
1103 state_saved = 0;
1104 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1105 if (s->cc_op != CC_OP_DYNAMIC)
1106 gen_op_set_cc_op(s->cc_op);
1107 gen_jmp_im(cur_eip);
1108 state_saved = 1;
1109 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1110 tcg_gen_helper_0_1(gen_check_io_func[ot],
1111 cpu_tmp2_i32);
1112 }
1113 if(s->flags & HF_SVMI_MASK) {
1114 if (!state_saved) {
1115 if (s->cc_op != CC_OP_DYNAMIC)
1116 gen_op_set_cc_op(s->cc_op);
1117 gen_jmp_im(cur_eip);
1118 state_saved = 1;
1119 }
1120 svm_flags |= (1 << (4 + ot));
1121 next_eip = s->pc - s->cs_base;
1122 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1123 tcg_gen_helper_0_3(helper_svm_check_io,
1124 cpu_tmp2_i32,
1125 tcg_const_i32(svm_flags),
1126 tcg_const_i32(next_eip - cur_eip));
1127 }
1128}
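#if 0 /* Illustrative note, not part of the original source: cc_op and EIP
         are flushed before either helper call because both
         gen_check_io_func[ot] and helper_svm_check_io may raise a fault
         (e.g. #GP on a failed I/O permission bitmap check) or trigger a
         #VMEXIT, and the guest must then see a consistent CPU state. */
#endif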
1129
1130#ifndef VBOX
1131static inline void gen_movs(DisasContext *s, int ot)
1132#else /* VBOX */
1133DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1134#endif /* VBOX */
1135{
1136 gen_string_movl_A0_ESI(s);
1137 gen_op_ld_T0_A0(ot + s->mem_index);
1138 gen_string_movl_A0_EDI(s);
1139 gen_op_st_T0_A0(ot + s->mem_index);
1140 gen_op_movl_T0_Dshift(ot);
1141 gen_op_add_reg_T0(s->aflag, R_ESI);
1142 gen_op_add_reg_T0(s->aflag, R_EDI);
1143}
1144
1145#ifndef VBOX
1146static inline void gen_update_cc_op(DisasContext *s)
1147#else /* VBOX */
1148DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1149#endif /* VBOX */
1150{
1151 if (s->cc_op != CC_OP_DYNAMIC) {
1152 gen_op_set_cc_op(s->cc_op);
1153 s->cc_op = CC_OP_DYNAMIC;
1154 }
1155}
1156
1157static void gen_op_update1_cc(void)
1158{
1159 tcg_gen_discard_tl(cpu_cc_src);
1160 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1161}
1162
1163static void gen_op_update2_cc(void)
1164{
1165 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1166 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1167}
1168
1169#ifndef VBOX
1170static inline void gen_op_cmpl_T0_T1_cc(void)
1171#else /* VBOX */
1172DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1173#endif /* VBOX */
1174{
1175 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1176 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1177}
1178
1179#ifndef VBOX
1180static inline void gen_op_testl_T0_T1_cc(void)
1181#else /* VBOX */
1182DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1183#endif /* VBOX */
1184{
1185 tcg_gen_discard_tl(cpu_cc_src);
1186 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1187}
1188
1189static void gen_op_update_neg_cc(void)
1190{
1191 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1192 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1193}
1194
1195/* compute eflags.C to reg */
1196static void gen_compute_eflags_c(TCGv reg)
1197{
1198#if TCG_TARGET_REG_BITS == 32
1199 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1200 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1201 (long)cc_table + offsetof(CCTable, compute_c));
1202 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1203 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1204 1, &cpu_tmp2_i32, 0, NULL);
1205#else
1206 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1207 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1208 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1209 (long)cc_table + offsetof(CCTable, compute_c));
1210 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1211 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1212 1, &cpu_tmp2_i32, 0, NULL);
1213#endif
1214 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1215}
1216
1217/* compute all eflags to cc_src */
1218static void gen_compute_eflags(TCGv reg)
1219{
1220#if TCG_TARGET_REG_BITS == 32
1221 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1222 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1223 (long)cc_table + offsetof(CCTable, compute_all));
1224 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1225 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1226 1, &cpu_tmp2_i32, 0, NULL);
1227#else
1228 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1229 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1230 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1231 (long)cc_table + offsetof(CCTable, compute_all));
1232 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1233 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1234 1, &cpu_tmp2_i32, 0, NULL);
1235#endif
1236 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1237}
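#if 0 /* Illustrative note, not part of the original source: both helpers
         above scale cc_op by sizeof(CCTable) -- shift by 3 (x8) on 32-bit
         hosts, by 4 (x16) on 64-bit hosts, presumably two function
         pointers per entry -- to index cc_table and call the selected
         callback indirectly, i.e. roughly:
             reg = cc_table[cc_op].compute_c();   (or .compute_all()) */
#endif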
1238
1239#ifndef VBOX
1240static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1241#else /* VBOX */
1242DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1243#endif /* VBOX */
1244{
1245 if (s->cc_op != CC_OP_DYNAMIC)
1246 gen_op_set_cc_op(s->cc_op);
1247 switch(jcc_op) {
1248 case JCC_O:
1249 gen_compute_eflags(cpu_T[0]);
1250 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1251 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1252 break;
1253 case JCC_B:
1254 gen_compute_eflags_c(cpu_T[0]);
1255 break;
1256 case JCC_Z:
1257 gen_compute_eflags(cpu_T[0]);
1258 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1259 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1260 break;
1261 case JCC_BE:
1262 gen_compute_eflags(cpu_tmp0);
1263 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1264 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1265 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1266 break;
1267 case JCC_S:
1268 gen_compute_eflags(cpu_T[0]);
1269 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1270 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1271 break;
1272 case JCC_P:
1273 gen_compute_eflags(cpu_T[0]);
1274 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1275 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1276 break;
1277 case JCC_L:
1278 gen_compute_eflags(cpu_tmp0);
1279 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1280 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1281 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1282 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1283 break;
1284 default:
1285 case JCC_LE:
1286 gen_compute_eflags(cpu_tmp0);
1287 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1288 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1289 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1290 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1291 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1292 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1293 break;
1294 }
1295}
1296
1297/* return true if setcc_slow is not needed (WARNING: must be kept in
1298 sync with gen_jcc1) */
1299static int is_fast_jcc_case(DisasContext *s, int b)
1300{
1301 int jcc_op;
1302 jcc_op = (b >> 1) & 7;
1303 switch(s->cc_op) {
1304 /* we optimize the cmp/jcc case */
1305 case CC_OP_SUBB:
1306 case CC_OP_SUBW:
1307 case CC_OP_SUBL:
1308 case CC_OP_SUBQ:
1309 if (jcc_op == JCC_O || jcc_op == JCC_P)
1310 goto slow_jcc;
1311 break;
1312
1313 /* some jumps are easy to compute */
1314 case CC_OP_ADDB:
1315 case CC_OP_ADDW:
1316 case CC_OP_ADDL:
1317 case CC_OP_ADDQ:
1318
1319 case CC_OP_LOGICB:
1320 case CC_OP_LOGICW:
1321 case CC_OP_LOGICL:
1322 case CC_OP_LOGICQ:
1323
1324 case CC_OP_INCB:
1325 case CC_OP_INCW:
1326 case CC_OP_INCL:
1327 case CC_OP_INCQ:
1328
1329 case CC_OP_DECB:
1330 case CC_OP_DECW:
1331 case CC_OP_DECL:
1332 case CC_OP_DECQ:
1333
1334 case CC_OP_SHLB:
1335 case CC_OP_SHLW:
1336 case CC_OP_SHLL:
1337 case CC_OP_SHLQ:
1338 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1339 goto slow_jcc;
1340 break;
1341 default:
1342 slow_jcc:
1343 return 0;
1344 }
1345 return 1;
1346}
1347
1348/* generate a conditional jump to label 'l1' according to jump opcode
1349 value 'b'. In the fast case, T0 is guaranteed not to be used. */
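#if 0 /* Illustrative sketch, not part of the original source: 'b' encodes
         the condition as (jcc_op << 1) | inv, matching the low nibble of
         the 0x70..0x7f Jcc opcodes. E.g. for JNE/JNZ (opcode 0x75): */
int l1 = gen_new_label();
gen_jcc1(s, s->cc_op, 0x75 & 0xf, l1);  /* b = 5: jcc_op = JCC_Z, inv = 1 */
#endif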
1350#ifndef VBOX
1351static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1352#else /* VBOX */
1353DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1354#endif /* VBOX */
1355{
1356 int inv, jcc_op, size, cond;
1357 TCGv t0;
1358
1359 inv = b & 1;
1360 jcc_op = (b >> 1) & 7;
1361
1362 switch(cc_op) {
1363 /* we optimize the cmp/jcc case */
1364 case CC_OP_SUBB:
1365 case CC_OP_SUBW:
1366 case CC_OP_SUBL:
1367 case CC_OP_SUBQ:
1368
1369 size = cc_op - CC_OP_SUBB;
1370 switch(jcc_op) {
1371 case JCC_Z:
1372 fast_jcc_z:
1373 switch(size) {
1374 case 0:
1375 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
1376 t0 = cpu_tmp0;
1377 break;
1378 case 1:
1379 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
1380 t0 = cpu_tmp0;
1381 break;
1382#ifdef TARGET_X86_64
1383 case 2:
1384 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1385 t0 = cpu_tmp0;
1386 break;
1387#endif
1388 default:
1389 t0 = cpu_cc_dst;
1390 break;
1391 }
1392 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
1393 break;
1394 case JCC_S:
1395 fast_jcc_s:
1396 switch(size) {
1397 case 0:
1398 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1399 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1400 0, l1);
1401 break;
1402 case 1:
1403 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1404 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1405 0, l1);
1406 break;
1407#ifdef TARGET_X86_64
1408 case 2:
1409 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1410 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1411 0, l1);
1412 break;
1413#endif
1414 default:
1415 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1416 0, l1);
1417 break;
1418 }
1419 break;
1420
1421 case JCC_B:
1422 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1423 goto fast_jcc_b;
1424 case JCC_BE:
1425 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1426 fast_jcc_b:
1427 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1428 switch(size) {
1429 case 0:
1430 t0 = cpu_tmp0;
1431 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1432 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1433 break;
1434 case 1:
1435 t0 = cpu_tmp0;
1436 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1437 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1438 break;
1439#ifdef TARGET_X86_64
1440 case 2:
1441 t0 = cpu_tmp0;
1442 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1443 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1444 break;
1445#endif
1446 default:
1447 t0 = cpu_cc_src;
1448 break;
1449 }
1450 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1451 break;
1452
1453 case JCC_L:
1454 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1455 goto fast_jcc_l;
1456 case JCC_LE:
1457 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1458 fast_jcc_l:
1459 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1460 switch(size) {
1461 case 0:
1462 t0 = cpu_tmp0;
1463 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1464 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1465 break;
1466 case 1:
1467 t0 = cpu_tmp0;
1468 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1469 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1470 break;
1471#ifdef TARGET_X86_64
1472 case 2:
1473 t0 = cpu_tmp0;
1474 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1475 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1476 break;
1477#endif
1478 default:
1479 t0 = cpu_cc_src;
1480 break;
1481 }
1482 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1483 break;
1484
1485 default:
1486 goto slow_jcc;
1487 }
1488 break;
1489
1490 /* some jumps are easy to compute */
1491 case CC_OP_ADDB:
1492 case CC_OP_ADDW:
1493 case CC_OP_ADDL:
1494 case CC_OP_ADDQ:
1495
1496 case CC_OP_ADCB:
1497 case CC_OP_ADCW:
1498 case CC_OP_ADCL:
1499 case CC_OP_ADCQ:
1500
1501 case CC_OP_SBBB:
1502 case CC_OP_SBBW:
1503 case CC_OP_SBBL:
1504 case CC_OP_SBBQ:
1505
1506 case CC_OP_LOGICB:
1507 case CC_OP_LOGICW:
1508 case CC_OP_LOGICL:
1509 case CC_OP_LOGICQ:
1510
1511 case CC_OP_INCB:
1512 case CC_OP_INCW:
1513 case CC_OP_INCL:
1514 case CC_OP_INCQ:
1515
1516 case CC_OP_DECB:
1517 case CC_OP_DECW:
1518 case CC_OP_DECL:
1519 case CC_OP_DECQ:
1520
1521 case CC_OP_SHLB:
1522 case CC_OP_SHLW:
1523 case CC_OP_SHLL:
1524 case CC_OP_SHLQ:
1525
1526 case CC_OP_SARB:
1527 case CC_OP_SARW:
1528 case CC_OP_SARL:
1529 case CC_OP_SARQ:
1530 switch(jcc_op) {
1531 case JCC_Z:
1532 size = (cc_op - CC_OP_ADDB) & 3;
1533 goto fast_jcc_z;
1534 case JCC_S:
1535 size = (cc_op - CC_OP_ADDB) & 3;
1536 goto fast_jcc_s;
1537 default:
1538 goto slow_jcc;
1539 }
1540 break;
1541 default:
1542 slow_jcc:
1543 gen_setcc_slow_T0(s, jcc_op);
1544 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1545 cpu_T[0], 0, l1);
1546 break;
1547 }
1548}
1549
1550/* XXX: does not work with gdbstub "ice" single step - not a
1551 serious problem */
1552static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1553{
1554 int l1, l2;
1555
1556 l1 = gen_new_label();
1557 l2 = gen_new_label();
1558 gen_op_jnz_ecx(s->aflag, l1);
1559 gen_set_label(l2);
1560 gen_jmp_tb(s, next_eip, 1);
1561 gen_set_label(l1);
1562 return l2;
1563}
1564
1565#ifndef VBOX
1566static inline void gen_stos(DisasContext *s, int ot)
1567#else /* VBOX */
1568DECLINLINE(void) gen_stos(DisasContext *s, int ot)
1569#endif /* VBOX */
1570{
1571 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1572 gen_string_movl_A0_EDI(s);
1573 gen_op_st_T0_A0(ot + s->mem_index);
1574 gen_op_movl_T0_Dshift(ot);
1575 gen_op_add_reg_T0(s->aflag, R_EDI);
1576}
1577
1578#ifndef VBOX
1579static inline void gen_lods(DisasContext *s, int ot)
1580#else /* VBOX */
1581DECLINLINE(void) gen_lods(DisasContext *s, int ot)
1582#endif /* VBOX */
1583{
1584 gen_string_movl_A0_ESI(s);
1585 gen_op_ld_T0_A0(ot + s->mem_index);
1586 gen_op_mov_reg_T0(ot, R_EAX);
1587 gen_op_movl_T0_Dshift(ot);
1588 gen_op_add_reg_T0(s->aflag, R_ESI);
1589}
1590
1591#ifndef VBOX
1592static inline void gen_scas(DisasContext *s, int ot)
1593#else /* VBOX */
1594DECLINLINE(void) gen_scas(DisasContext *s, int ot)
1595#endif /* VBOX */
1596{
1597 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1598 gen_string_movl_A0_EDI(s);
1599 gen_op_ld_T1_A0(ot + s->mem_index);
1600 gen_op_cmpl_T0_T1_cc();
1601 gen_op_movl_T0_Dshift(ot);
1602 gen_op_add_reg_T0(s->aflag, R_EDI);
1603}
1604
1605#ifndef VBOX
1606static inline void gen_cmps(DisasContext *s, int ot)
1607#else /* VBOX */
1608DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
1609#endif /* VBOX */
1610{
1611 gen_string_movl_A0_ESI(s);
1612 gen_op_ld_T0_A0(ot + s->mem_index);
1613 gen_string_movl_A0_EDI(s);
1614 gen_op_ld_T1_A0(ot + s->mem_index);
1615 gen_op_cmpl_T0_T1_cc();
1616 gen_op_movl_T0_Dshift(ot);
1617 gen_op_add_reg_T0(s->aflag, R_ESI);
1618 gen_op_add_reg_T0(s->aflag, R_EDI);
1619}
1620
1621#ifndef VBOX
1622static inline void gen_ins(DisasContext *s, int ot)
1623#else /* VBOX */
1624DECLINLINE(void) gen_ins(DisasContext *s, int ot)
1625#endif /* VBOX */
1626{
1627 if (use_icount)
1628 gen_io_start();
1629 gen_string_movl_A0_EDI(s);
1630 /* Note: we must do this dummy write first to be restartable in
1631 case of page fault. */
1632 gen_op_movl_T0_0();
1633 gen_op_st_T0_A0(ot + s->mem_index);
1634 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1635 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1636 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1637 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1638 gen_op_st_T0_A0(ot + s->mem_index);
1639 gen_op_movl_T0_Dshift(ot);
1640 gen_op_add_reg_T0(s->aflag, R_EDI);
1641 if (use_icount)
1642 gen_io_end();
1643}
1644
1645#ifndef VBOX
1646static inline void gen_outs(DisasContext *s, int ot)
1647#else /* VBOX */
1648DECLINLINE(void) gen_outs(DisasContext *s, int ot)
1649#endif /* VBOX */
1650{
1651 if (use_icount)
1652 gen_io_start();
1653 gen_string_movl_A0_ESI(s);
1654 gen_op_ld_T0_A0(ot + s->mem_index);
1655
1656 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1657 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1658 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1659 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1660 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1661
1662 gen_op_movl_T0_Dshift(ot);
1663 gen_op_add_reg_T0(s->aflag, R_ESI);
1664 if (use_icount)
1665 gen_io_end();
1666}
1667
1668/* same method as Valgrind: we generate jumps to the current or next
1669 instruction */
1670#ifndef VBOX
1671#define GEN_REPZ(op) \
1672static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1673 target_ulong cur_eip, target_ulong next_eip) \
1674{ \
1675 int l2; \
1676 gen_update_cc_op(s); \
1677 l2 = gen_jz_ecx_string(s, next_eip); \
1678 gen_ ## op(s, ot); \
1679 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1680 /* a loop would cause two single step exceptions if ECX = 1 \
1681 before rep string_insn */ \
1682 if (!s->jmp_opt) \
1683 gen_op_jz_ecx(s->aflag, l2); \
1684 gen_jmp(s, cur_eip); \
1685}
1686#else /* VBOX */
1687#define GEN_REPZ(op) \
1688DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1689 target_ulong cur_eip, target_ulong next_eip) \
1690{ \
1691 int l2; \
1692 gen_update_cc_op(s); \
1693 l2 = gen_jz_ecx_string(s, next_eip); \
1694 gen_ ## op(s, ot); \
1695 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1696 /* a loop would cause two single step exceptions if ECX = 1 \
1697 before rep string_insn */ \
1698 if (!s->jmp_opt) \
1699 gen_op_jz_ecx(s->aflag, l2); \
1700 gen_jmp(s, cur_eip); \
1701}
1702#endif /* VBOX */
1703
1704#ifndef VBOX
1705#define GEN_REPZ2(op) \
1706static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1707 target_ulong cur_eip, \
1708 target_ulong next_eip, \
1709 int nz) \
1710{ \
1711 int l2; \
1712 gen_update_cc_op(s); \
1713 l2 = gen_jz_ecx_string(s, next_eip); \
1714 gen_ ## op(s, ot); \
1715 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1716 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1717 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1718 if (!s->jmp_opt) \
1719 gen_op_jz_ecx(s->aflag, l2); \
1720 gen_jmp(s, cur_eip); \
1721}
1722#else /* VBOX */
1723#define GEN_REPZ2(op) \
1724DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1725 target_ulong cur_eip, \
1726 target_ulong next_eip, \
1727 int nz) \
1728{ \
1729 int l2;\
1730 gen_update_cc_op(s); \
1731 l2 = gen_jz_ecx_string(s, next_eip); \
1732 gen_ ## op(s, ot); \
1733 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1734 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1735 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1736 if (!s->jmp_opt) \
1737 gen_op_jz_ecx(s->aflag, l2); \
1738 gen_jmp(s, cur_eip); \
1739}
1740#endif /* VBOX */
1741
1742GEN_REPZ(movs)
1743GEN_REPZ(stos)
1744GEN_REPZ(lods)
1745GEN_REPZ(ins)
1746GEN_REPZ(outs)
1747GEN_REPZ2(scas)
1748GEN_REPZ2(cmps)
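#if 0 /* Illustrative sketch, not part of the original source: the non-VBOX
         GEN_REPZ(movs) above expands to roughly the following, looping by
         jumping back to the current instruction as the comment explains: */
static inline void gen_repz_movs(DisasContext *s, int ot,
                                 target_ulong cur_eip, target_ulong next_eip)
{
    int l2;
    gen_update_cc_op(s);
    l2 = gen_jz_ecx_string(s, next_eip);      /* exit when ECX == 0 */
    gen_movs(s, ot);                          /* one string iteration */
    gen_op_add_reg_im(s->aflag, R_ECX, -1);   /* ECX-- */
    if (!s->jmp_opt)
        gen_op_jz_ecx(s->aflag, l2);
    gen_jmp(s, cur_eip);                      /* re-execute the rep insn */
}
#endif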
1749
1750static void *helper_fp_arith_ST0_FT0[8] = {
1751 helper_fadd_ST0_FT0,
1752 helper_fmul_ST0_FT0,
1753 helper_fcom_ST0_FT0,
1754 helper_fcom_ST0_FT0,
1755 helper_fsub_ST0_FT0,
1756 helper_fsubr_ST0_FT0,
1757 helper_fdiv_ST0_FT0,
1758 helper_fdivr_ST0_FT0,
1759};
1760
1761/* NOTE the exception in "r" op ordering */
1762static void *helper_fp_arith_STN_ST0[8] = {
1763 helper_fadd_STN_ST0,
1764 helper_fmul_STN_ST0,
1765 NULL,
1766 NULL,
1767 helper_fsubr_STN_ST0,
1768 helper_fsub_STN_ST0,
1769 helper_fdivr_STN_ST0,
1770 helper_fdiv_STN_ST0,
1771};
1772
1773/* if d == OR_TMP0, it means memory operand (address in A0) */
1774static void gen_op(DisasContext *s1, int op, int ot, int d)
1775{
1776 if (d != OR_TMP0) {
1777 gen_op_mov_TN_reg(ot, 0, d);
1778 } else {
1779 gen_op_ld_T0_A0(ot + s1->mem_index);
1780 }
1781 switch(op) {
1782 case OP_ADCL:
1783 if (s1->cc_op != CC_OP_DYNAMIC)
1784 gen_op_set_cc_op(s1->cc_op);
1785 gen_compute_eflags_c(cpu_tmp4);
1786 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1787 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1788 if (d != OR_TMP0)
1789 gen_op_mov_reg_T0(ot, d);
1790 else
1791 gen_op_st_T0_A0(ot + s1->mem_index);
1792 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1793 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1794 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1795 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1796 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1797 s1->cc_op = CC_OP_DYNAMIC;
1798 break;
1799 case OP_SBBL:
1800 if (s1->cc_op != CC_OP_DYNAMIC)
1801 gen_op_set_cc_op(s1->cc_op);
1802 gen_compute_eflags_c(cpu_tmp4);
1803 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1804 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1805 if (d != OR_TMP0)
1806 gen_op_mov_reg_T0(ot, d);
1807 else
1808 gen_op_st_T0_A0(ot + s1->mem_index);
1809 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1810 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1811 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1812 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1813 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1814 s1->cc_op = CC_OP_DYNAMIC;
1815 break;
1816 case OP_ADDL:
1817 gen_op_addl_T0_T1();
1818 if (d != OR_TMP0)
1819 gen_op_mov_reg_T0(ot, d);
1820 else
1821 gen_op_st_T0_A0(ot + s1->mem_index);
1822 gen_op_update2_cc();
1823 s1->cc_op = CC_OP_ADDB + ot;
1824 break;
1825 case OP_SUBL:
1826 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1827 if (d != OR_TMP0)
1828 gen_op_mov_reg_T0(ot, d);
1829 else
1830 gen_op_st_T0_A0(ot + s1->mem_index);
1831 gen_op_update2_cc();
1832 s1->cc_op = CC_OP_SUBB + ot;
1833 break;
1834 default:
1835 case OP_ANDL:
1836 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1837 if (d != OR_TMP0)
1838 gen_op_mov_reg_T0(ot, d);
1839 else
1840 gen_op_st_T0_A0(ot + s1->mem_index);
1841 gen_op_update1_cc();
1842 s1->cc_op = CC_OP_LOGICB + ot;
1843 break;
1844 case OP_ORL:
1845 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1846 if (d != OR_TMP0)
1847 gen_op_mov_reg_T0(ot, d);
1848 else
1849 gen_op_st_T0_A0(ot + s1->mem_index);
1850 gen_op_update1_cc();
1851 s1->cc_op = CC_OP_LOGICB + ot;
1852 break;
1853 case OP_XORL:
1854 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1855 if (d != OR_TMP0)
1856 gen_op_mov_reg_T0(ot, d);
1857 else
1858 gen_op_st_T0_A0(ot + s1->mem_index);
1859 gen_op_update1_cc();
1860 s1->cc_op = CC_OP_LOGICB + ot;
1861 break;
1862 case OP_CMPL:
1863 gen_op_cmpl_T0_T1_cc();
1864 s1->cc_op = CC_OP_SUBB + ot;
1865 break;
1866 }
1867}
1868
1869/* if d == OR_TMP0, it means memory operand (address in A0) */
1870static void gen_inc(DisasContext *s1, int ot, int d, int c)
1871{
1872 if (d != OR_TMP0)
1873 gen_op_mov_TN_reg(ot, 0, d);
1874 else
1875 gen_op_ld_T0_A0(ot + s1->mem_index);
1876 if (s1->cc_op != CC_OP_DYNAMIC)
1877 gen_op_set_cc_op(s1->cc_op);
1878 if (c > 0) {
1879 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1880 s1->cc_op = CC_OP_INCB + ot;
1881 } else {
1882 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1883 s1->cc_op = CC_OP_DECB + ot;
1884 }
1885 if (d != OR_TMP0)
1886 gen_op_mov_reg_T0(ot, d);
1887 else
1888 gen_op_st_T0_A0(ot + s1->mem_index);
1889 gen_compute_eflags_c(cpu_cc_src);
1890 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1891}
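#if 0 /* Illustrative note, not part of the original source: INC/DEC leave
         CF unchanged, so the pre-operation carry is materialized into
         cc_src above, and the CC_OP_INCB..CC_OP_DECQ flag evaluators
         recombine it with the flags computed from cc_dst. */
#endif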
1892
1893static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1894 int is_right, int is_arith)
1895{
1896 target_ulong mask;
1897 int shift_label;
1898 TCGv t0, t1;
1899
1900 if (ot == OT_QUAD)
1901 mask = 0x3f;
1902 else
1903 mask = 0x1f;
1904
1905 /* load */
1906 if (op1 == OR_TMP0)
1907 gen_op_ld_T0_A0(ot + s->mem_index);
1908 else
1909 gen_op_mov_TN_reg(ot, 0, op1);
1910
1911 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1912
1913 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1914
1915 if (is_right) {
1916 if (is_arith) {
1917 gen_exts(ot, cpu_T[0]);
1918 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1919 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1920 } else {
1921 gen_extu(ot, cpu_T[0]);
1922 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1923 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1924 }
1925 } else {
1926 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1927 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1928 }
1929
1930 /* store */
1931 if (op1 == OR_TMP0)
1932 gen_op_st_T0_A0(ot + s->mem_index);
1933 else
1934 gen_op_mov_reg_T0(ot, op1);
1935
1936 /* update eflags if non zero shift */
1937 if (s->cc_op != CC_OP_DYNAMIC)
1938 gen_op_set_cc_op(s->cc_op);
1939
1940 /* XXX: inefficient */
1941 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1942 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1943
1944 tcg_gen_mov_tl(t0, cpu_T[0]);
1945 tcg_gen_mov_tl(t1, cpu_T3);
1946
1947 shift_label = gen_new_label();
1948 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1949
1950 tcg_gen_mov_tl(cpu_cc_src, t1);
1951 tcg_gen_mov_tl(cpu_cc_dst, t0);
1952 if (is_right)
1953 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1954 else
1955 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1956
1957 gen_set_label(shift_label);
1958 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1959
1960 tcg_temp_free(t0);
1961 tcg_temp_free(t1);
1962}
1963
1964static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1965 int is_right, int is_arith)
1966{
1967 int mask;
1968
1969 if (ot == OT_QUAD)
1970 mask = 0x3f;
1971 else
1972 mask = 0x1f;
1973
1974 /* load */
1975 if (op1 == OR_TMP0)
1976 gen_op_ld_T0_A0(ot + s->mem_index);
1977 else
1978 gen_op_mov_TN_reg(ot, 0, op1);
1979
1980 op2 &= mask;
1981 if (op2 != 0) {
1982 if (is_right) {
1983 if (is_arith) {
1984 gen_exts(ot, cpu_T[0]);
1985 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1986 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1987 } else {
1988 gen_extu(ot, cpu_T[0]);
1989 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1990 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1991 }
1992 } else {
1993 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1994 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1995 }
1996 }
1997
1998 /* store */
1999 if (op1 == OR_TMP0)
2000 gen_op_st_T0_A0(ot + s->mem_index);
2001 else
2002 gen_op_mov_reg_T0(ot, op1);
2003
2004 /* update eflags if non zero shift */
2005 if (op2 != 0) {
2006 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
2007 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
2008 if (is_right)
2009 s->cc_op = CC_OP_SARB + ot;
2010 else
2011 s->cc_op = CC_OP_SHLB + ot;
2012 }
2013}
2014
2015#ifndef VBOX
2016static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2017#else /* VBOX */
2018DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2019#endif /* VBOX */
2020{
2021 if (arg2 >= 0)
2022 tcg_gen_shli_tl(ret, arg1, arg2);
2023 else
2024 tcg_gen_shri_tl(ret, arg1, -arg2);
2025}
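#if 0 /* Illustrative note, not part of the original source: a negative
         arg2 shifts right instead, so one call site can move a bit in
         either direction, e.g. tcg_gen_lshift(t, t, 11 - (data_bits - 1))
         below aligns the rotated-out sign bit with the CC_O position
         whichever way that requires. */
#endif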
2026
2027/* XXX: add faster immediate case */
2028static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
2029 int is_right)
2030{
2031 target_ulong mask;
2032 int label1, label2, data_bits;
2033 TCGv t0, t1, t2, a0;
2034
2035 /* XXX: inefficient, but we must use local temps */
2036 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2037 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2038 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2039 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2040
2041 if (ot == OT_QUAD)
2042 mask = 0x3f;
2043 else
2044 mask = 0x1f;
2045
2046 /* load */
2047 if (op1 == OR_TMP0) {
2048 tcg_gen_mov_tl(a0, cpu_A0);
2049 gen_op_ld_v(ot + s->mem_index, t0, a0);
2050 } else {
2051 gen_op_mov_v_reg(ot, t0, op1);
2052 }
2053
2054 tcg_gen_mov_tl(t1, cpu_T[1]);
2055
2056 tcg_gen_andi_tl(t1, t1, mask);
2057
2058 /* Must test zero case to avoid using undefined behaviour in TCG
2059 shifts. */
2060 label1 = gen_new_label();
2061 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
2062
2063 if (ot <= OT_WORD)
2064 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
2065 else
2066 tcg_gen_mov_tl(cpu_tmp0, t1);
2067
2068 gen_extu(ot, t0);
2069 tcg_gen_mov_tl(t2, t0);
2070
2071 data_bits = 8 << ot;
2072 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
2073 fix TCG definition) */
2074 if (is_right) {
2075 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
2076 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2077 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
2078 } else {
2079 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
2080 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2081 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2082 }
2083 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2084
2085 gen_set_label(label1);
2086 /* store */
2087 if (op1 == OR_TMP0) {
2088 gen_op_st_v(ot + s->mem_index, t0, a0);
2089 } else {
2090 gen_op_mov_reg_v(ot, op1, t0);
2091 }
2092
2093 /* update eflags */
2094 if (s->cc_op != CC_OP_DYNAMIC)
2095 gen_op_set_cc_op(s->cc_op);
2096
2097 label2 = gen_new_label();
2098 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2099
2100 gen_compute_eflags(cpu_cc_src);
2101 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2102 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2103 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2104 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2105 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2106 if (is_right) {
2107 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2108 }
2109 tcg_gen_andi_tl(t0, t0, CC_C);
2110 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2111
2112 tcg_gen_discard_tl(cpu_cc_dst);
2113 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2114
2115 gen_set_label(label2);
2116 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2117
2118 tcg_temp_free(t0);
2119 tcg_temp_free(t1);
2120 tcg_temp_free(t2);
2121 tcg_temp_free(a0);
2122}
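#if 0 /* Illustrative note, not part of the original source: the
         shl/shr/or sequence above is the standard rotate identity
             rol(x, n) = (x << n) | (x >> (data_bits - n))
             ror(x, n) = (x >> n) | (x << (data_bits - n))
         The branch to label1 skips the n == 0 case, and the XXX comment
         above flags the remaining reliance on shift-overflow behaviour
         when the masked count is a multiple of data_bits. */
#endif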
2123
2124static void *helper_rotc[8] = {
2125 helper_rclb,
2126 helper_rclw,
2127 helper_rcll,
2128 X86_64_ONLY(helper_rclq),
2129 helper_rcrb,
2130 helper_rcrw,
2131 helper_rcrl,
2132 X86_64_ONLY(helper_rcrq),
2133};
2134
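/* Generate RCL/RCR (rotate through carry) via the helper_rotc[] helpers,
   since the extra carry bit is awkward to express directly in TCG. The
   generated code commits the flags from cc_tmp only when it differs from
   -1, i.e. when the helper actually rotated by a non-zero count. */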
2135/* XXX: add faster immediate = 1 case */
2136static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2137 int is_right)
2138{
2139 int label1;
2140
2141 if (s->cc_op != CC_OP_DYNAMIC)
2142 gen_op_set_cc_op(s->cc_op);
2143
2144 /* load */
2145 if (op1 == OR_TMP0)
2146 gen_op_ld_T0_A0(ot + s->mem_index);
2147 else
2148 gen_op_mov_TN_reg(ot, 0, op1);
2149
2150 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2151 cpu_T[0], cpu_T[0], cpu_T[1]);
2152 /* store */
2153 if (op1 == OR_TMP0)
2154 gen_op_st_T0_A0(ot + s->mem_index);
2155 else
2156 gen_op_mov_reg_T0(ot, op1);
2157
2158 /* update eflags */
2159 label1 = gen_new_label();
2160 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2161
2162 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2163 tcg_gen_discard_tl(cpu_cc_dst);
2164 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2165
2166 gen_set_label(label1);
2167 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2168}
2169
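/* Generate SHLD/SHRD: T1 supplies the bits shifted in and T3 the count.
   For 16-bit operands the operand pair is widened to 32 bits so that
   counts above 16 follow the Intel behaviour; flags are only updated for
   a non-zero masked count. */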
2170/* XXX: add faster immediate case */
2171static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2172 int is_right)
2173{
2174 int label1, label2, data_bits;
2175 target_ulong mask;
2176 TCGv t0, t1, t2, a0;
2177
2178 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2179 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2180 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2181 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2182
2183 if (ot == OT_QUAD)
2184 mask = 0x3f;
2185 else
2186 mask = 0x1f;
2187
2188 /* load */
2189 if (op1 == OR_TMP0) {
2190 tcg_gen_mov_tl(a0, cpu_A0);
2191 gen_op_ld_v(ot + s->mem_index, t0, a0);
2192 } else {
2193 gen_op_mov_v_reg(ot, t0, op1);
2194 }
2195
2196 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2197
2198 tcg_gen_mov_tl(t1, cpu_T[1]);
2199 tcg_gen_mov_tl(t2, cpu_T3);
2200
2201 /* Must test zero case to avoid using undefined behaviour in TCG
2202 shifts. */
2203 label1 = gen_new_label();
2204 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2205
2206 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2207 if (ot == OT_WORD) {
2208 /* Note: we implement the Intel behaviour for shift count > 16 */
2209 if (is_right) {
2210 tcg_gen_andi_tl(t0, t0, 0xffff);
2211 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2212 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2213 tcg_gen_ext32u_tl(t0, t0);
2214
2215 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2216
                /* only needed if count > 16, but testing for that would complicate the code */
2218 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2219 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2220
2221 tcg_gen_shr_tl(t0, t0, t2);
2222
2223 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2224 } else {
2225 /* XXX: not optimal */
2226 tcg_gen_andi_tl(t0, t0, 0xffff);
2227 tcg_gen_shli_tl(t1, t1, 16);
2228 tcg_gen_or_tl(t1, t1, t0);
2229 tcg_gen_ext32u_tl(t1, t1);
2230
2231 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2232 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2233 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2234 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2235
2236 tcg_gen_shl_tl(t0, t0, t2);
2237 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2238 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2239 tcg_gen_or_tl(t0, t0, t1);
2240 }
2241 } else {
2242 data_bits = 8 << ot;
2243 if (is_right) {
2244 if (ot == OT_LONG)
2245 tcg_gen_ext32u_tl(t0, t0);
2246
2247 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2248
2249 tcg_gen_shr_tl(t0, t0, t2);
2250 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2251 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2252 tcg_gen_or_tl(t0, t0, t1);
2253
2254 } else {
2255 if (ot == OT_LONG)
2256 tcg_gen_ext32u_tl(t1, t1);
2257
2258 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2259
2260 tcg_gen_shl_tl(t0, t0, t2);
2261 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2262 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2263 tcg_gen_or_tl(t0, t0, t1);
2264 }
2265 }
2266 tcg_gen_mov_tl(t1, cpu_tmp4);
2267
2268 gen_set_label(label1);
2269 /* store */
2270 if (op1 == OR_TMP0) {
2271 gen_op_st_v(ot + s->mem_index, t0, a0);
2272 } else {
2273 gen_op_mov_reg_v(ot, op1, t0);
2274 }
2275
2276 /* update eflags */
2277 if (s->cc_op != CC_OP_DYNAMIC)
2278 gen_op_set_cc_op(s->cc_op);
2279
2280 label2 = gen_new_label();
2281 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2282
2283 tcg_gen_mov_tl(cpu_cc_src, t1);
2284 tcg_gen_mov_tl(cpu_cc_dst, t0);
2285 if (is_right) {
2286 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2287 } else {
2288 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2289 }
2290 gen_set_label(label2);
2291 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2292
2293 tcg_temp_free(t0);
2294 tcg_temp_free(t1);
2295 tcg_temp_free(t2);
2296 tcg_temp_free(a0);
2297}
2298
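/* Dispatch a shift or rotate of register/memory operand 'd' by the count
   in 's' (moved to T1 unless it is already OR_TMP1). */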
2299static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2300{
2301 if (s != OR_TMP1)
2302 gen_op_mov_TN_reg(ot, 1, s);
2303 switch(op) {
2304 case OP_ROL:
2305 gen_rot_rm_T1(s1, ot, d, 0);
2306 break;
2307 case OP_ROR:
2308 gen_rot_rm_T1(s1, ot, d, 1);
2309 break;
2310 case OP_SHL:
2311 case OP_SHL1:
2312 gen_shift_rm_T1(s1, ot, d, 0, 0);
2313 break;
2314 case OP_SHR:
2315 gen_shift_rm_T1(s1, ot, d, 1, 0);
2316 break;
2317 case OP_SAR:
2318 gen_shift_rm_T1(s1, ot, d, 1, 1);
2319 break;
2320 case OP_RCL:
2321 gen_rotc_rm_T1(s1, ot, d, 0);
2322 break;
2323 case OP_RCR:
2324 gen_rotc_rm_T1(s1, ot, d, 1);
2325 break;
2326 }
2327}
2328
2329static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2330{
2331 switch(op) {
2332 case OP_SHL:
2333 case OP_SHL1:
2334 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2335 break;
2336 case OP_SHR:
2337 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2338 break;
2339 case OP_SAR:
2340 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2341 break;
2342 default:
2343 /* currently not optimized */
2344 gen_op_movl_T1_im(c);
2345 gen_shift(s1, op, ot, d, OR_TMP1);
2346 break;
2347 }
2348}
2349
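/* Decode the ModRM byte (plus any SIB byte and displacement) of a memory
   operand and leave the effective address in A0, adding the segment base
   when addseg or an explicit override requires it. Handles 16-, 32- and
   64-bit addressing, including RIP-relative displacements in long mode. */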
2350static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2351{
2352 target_long disp;
2353 int havesib;
2354 int base;
2355 int index;
2356 int scale;
2357 int opreg;
2358 int mod, rm, code, override, must_add_seg;
2359
2360 override = s->override;
2361 must_add_seg = s->addseg;
2362 if (override >= 0)
2363 must_add_seg = 1;
2364 mod = (modrm >> 6) & 3;
2365 rm = modrm & 7;
2366
2367 if (s->aflag) {
2368
2369 havesib = 0;
2370 base = rm;
2371 index = 0;
2372 scale = 0;
2373
2374 if (base == 4) {
2375 havesib = 1;
2376 code = ldub_code(s->pc++);
2377 scale = (code >> 6) & 3;
2378 index = ((code >> 3) & 7) | REX_X(s);
2379 base = (code & 7);
2380 }
2381 base |= REX_B(s);
2382
2383 switch (mod) {
2384 case 0:
2385 if ((base & 7) == 5) {
2386 base = -1;
2387 disp = (int32_t)ldl_code(s->pc);
2388 s->pc += 4;
2389 if (CODE64(s) && !havesib) {
2390 disp += s->pc + s->rip_offset;
2391 }
2392 } else {
2393 disp = 0;
2394 }
2395 break;
2396 case 1:
2397 disp = (int8_t)ldub_code(s->pc++);
2398 break;
2399 default:
2400 case 2:
2401#ifdef VBOX
2402 disp = (int32_t)ldl_code(s->pc);
2403#else
2404 disp = ldl_code(s->pc);
2405#endif
2406 s->pc += 4;
2407 break;
2408 }
2409
2410 if (base >= 0) {
2411 /* for correct popl handling with esp */
2412 if (base == 4 && s->popl_esp_hack)
2413 disp += s->popl_esp_hack;
2414#ifdef TARGET_X86_64
2415 if (s->aflag == 2) {
2416 gen_op_movq_A0_reg(base);
2417 if (disp != 0) {
2418 gen_op_addq_A0_im(disp);
2419 }
2420 } else
2421#endif
2422 {
2423 gen_op_movl_A0_reg(base);
2424 if (disp != 0)
2425 gen_op_addl_A0_im(disp);
2426 }
2427 } else {
2428#ifdef TARGET_X86_64
2429 if (s->aflag == 2) {
2430 gen_op_movq_A0_im(disp);
2431 } else
2432#endif
2433 {
2434 gen_op_movl_A0_im(disp);
2435 }
2436 }
2437 /* index == 4 means no index */
2438 if (havesib && (index != 4)) {
2439#ifdef TARGET_X86_64
2440 if (s->aflag == 2) {
2441 gen_op_addq_A0_reg_sN(scale, index);
2442 } else
2443#endif
2444 {
2445 gen_op_addl_A0_reg_sN(scale, index);
2446 }
2447 }
2448 if (must_add_seg) {
2449 if (override < 0) {
2450 if (base == R_EBP || base == R_ESP)
2451 override = R_SS;
2452 else
2453 override = R_DS;
2454 }
2455#ifdef TARGET_X86_64
2456 if (s->aflag == 2) {
2457 gen_op_addq_A0_seg(override);
2458 } else
2459#endif
2460 {
2461 gen_op_addl_A0_seg(override);
2462 }
2463 }
2464 } else {
2465 switch (mod) {
2466 case 0:
2467 if (rm == 6) {
2468 disp = lduw_code(s->pc);
2469 s->pc += 2;
2470 gen_op_movl_A0_im(disp);
2471 rm = 0; /* avoid SS override */
2472 goto no_rm;
2473 } else {
2474 disp = 0;
2475 }
2476 break;
2477 case 1:
2478 disp = (int8_t)ldub_code(s->pc++);
2479 break;
2480 default:
2481 case 2:
2482 disp = lduw_code(s->pc);
2483 s->pc += 2;
2484 break;
2485 }
2486 switch(rm) {
2487 case 0:
2488 gen_op_movl_A0_reg(R_EBX);
2489 gen_op_addl_A0_reg_sN(0, R_ESI);
2490 break;
2491 case 1:
2492 gen_op_movl_A0_reg(R_EBX);
2493 gen_op_addl_A0_reg_sN(0, R_EDI);
2494 break;
2495 case 2:
2496 gen_op_movl_A0_reg(R_EBP);
2497 gen_op_addl_A0_reg_sN(0, R_ESI);
2498 break;
2499 case 3:
2500 gen_op_movl_A0_reg(R_EBP);
2501 gen_op_addl_A0_reg_sN(0, R_EDI);
2502 break;
2503 case 4:
2504 gen_op_movl_A0_reg(R_ESI);
2505 break;
2506 case 5:
2507 gen_op_movl_A0_reg(R_EDI);
2508 break;
2509 case 6:
2510 gen_op_movl_A0_reg(R_EBP);
2511 break;
2512 default:
2513 case 7:
2514 gen_op_movl_A0_reg(R_EBX);
2515 break;
2516 }
2517 if (disp != 0)
2518 gen_op_addl_A0_im(disp);
2519 gen_op_andl_A0_ffff();
2520 no_rm:
2521 if (must_add_seg) {
2522 if (override < 0) {
2523 if (rm == 2 || rm == 3 || rm == 6)
2524 override = R_SS;
2525 else
2526 override = R_DS;
2527 }
2528 gen_op_addl_A0_seg(override);
2529 }
2530 }
2531
2532 opreg = OR_A0;
2533 disp = 0;
2534 *reg_ptr = opreg;
2535 *offset_ptr = disp;
2536}
2537
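/* Skip the memory operand of an instruction that generates no code
   (multi-byte NOP, prefetch hints): only s->pc is advanced past the
   ModRM/SIB bytes and displacement. */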
2538static void gen_nop_modrm(DisasContext *s, int modrm)
2539{
2540 int mod, rm, base, code;
2541
2542 mod = (modrm >> 6) & 3;
2543 if (mod == 3)
2544 return;
2545 rm = modrm & 7;
2546
2547 if (s->aflag) {
2548
2549 base = rm;
2550
2551 if (base == 4) {
2552 code = ldub_code(s->pc++);
2553 base = (code & 7);
2554 }
2555
2556 switch (mod) {
2557 case 0:
2558 if (base == 5) {
2559 s->pc += 4;
2560 }
2561 break;
2562 case 1:
2563 s->pc++;
2564 break;
2565 default:
2566 case 2:
2567 s->pc += 4;
2568 break;
2569 }
2570 } else {
2571 switch (mod) {
2572 case 0:
2573 if (rm == 6) {
2574 s->pc += 2;
2575 }
2576 break;
2577 case 1:
2578 s->pc++;
2579 break;
2580 default:
2581 case 2:
2582 s->pc += 2;
2583 break;
2584 }
2585 }
2586}
2587
2588/* used for LEA and MOV AX, mem */
2589static void gen_add_A0_ds_seg(DisasContext *s)
2590{
2591 int override, must_add_seg;
2592 must_add_seg = s->addseg;
2593 override = R_DS;
2594 if (s->override >= 0) {
2595 override = s->override;
2596 must_add_seg = 1;
    }
2600 if (must_add_seg) {
2601#ifdef TARGET_X86_64
2602 if (CODE64(s)) {
2603 gen_op_addq_A0_seg(override);
2604 } else
2605#endif
2606 {
2607 gen_op_addl_A0_seg(override);
2608 }
2609 }
2610}
2611
2612/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2613 OR_TMP0 */
2614static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2615{
2616 int mod, rm, opreg, disp;
2617
2618 mod = (modrm >> 6) & 3;
2619 rm = (modrm & 7) | REX_B(s);
2620 if (mod == 3) {
2621 if (is_store) {
2622 if (reg != OR_TMP0)
2623 gen_op_mov_TN_reg(ot, 0, reg);
2624 gen_op_mov_reg_T0(ot, rm);
2625 } else {
2626 gen_op_mov_TN_reg(ot, 0, rm);
2627 if (reg != OR_TMP0)
2628 gen_op_mov_reg_T0(ot, reg);
2629 }
2630 } else {
2631 gen_lea_modrm(s, modrm, &opreg, &disp);
2632 if (is_store) {
2633 if (reg != OR_TMP0)
2634 gen_op_mov_TN_reg(ot, 0, reg);
2635 gen_op_st_T0_A0(ot + s->mem_index);
2636 } else {
2637 gen_op_ld_T0_A0(ot + s->mem_index);
2638 if (reg != OR_TMP0)
2639 gen_op_mov_reg_T0(ot, reg);
2640 }
2641 }
2642}
2643
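/* Fetch an immediate operand of size 'ot' from the code stream and
   advance s->pc past it. */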
2644#ifndef VBOX
2645static inline uint32_t insn_get(DisasContext *s, int ot)
2646#else /* VBOX */
2647DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2648#endif /* VBOX */
2649{
2650 uint32_t ret;
2651
2652 switch(ot) {
2653 case OT_BYTE:
2654 ret = ldub_code(s->pc);
2655 s->pc++;
2656 break;
2657 case OT_WORD:
2658 ret = lduw_code(s->pc);
2659 s->pc += 2;
2660 break;
2661 default:
2662 case OT_LONG:
2663 ret = ldl_code(s->pc);
2664 s->pc += 4;
2665 break;
2666 }
2667 return ret;
2668}
2669
2670#ifndef VBOX
2671static inline int insn_const_size(unsigned int ot)
2672#else /* VBOX */
2673DECLINLINE(int) insn_const_size(unsigned int ot)
2674#endif /* VBOX */
2675{
2676 if (ot <= OT_LONG)
2677 return 1 << ot;
2678 else
2679 return 4;
2680}
2681
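/* Jump to eip, chaining this TB directly to the target when it lies on
   one of the (at most) two pages the TB may span; otherwise end the block
   normally. */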
2682#ifndef VBOX
2683static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2684#else /* VBOX */
2685DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2686#endif /* VBOX */
2687{
2688 TranslationBlock *tb;
2689 target_ulong pc;
2690
2691 pc = s->cs_base + eip;
2692 tb = s->tb;
2693 /* NOTE: we handle the case where the TB spans two pages here */
2694 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2695 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2696#ifdef VBOX
2697 gen_check_external_event(s);
2698#endif /* VBOX */
2699 /* jump to same page: we can use a direct jump */
2700 tcg_gen_goto_tb(tb_num);
2701 gen_jmp_im(eip);
2702 tcg_gen_exit_tb((long)tb + tb_num);
2703 } else {
2704 /* jump to another page: currently not optimized */
2705 gen_jmp_im(eip);
2706 gen_eob(s);
2707 }
2708}
2709
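/* Emit a conditional jump: taken -> 'val', not taken -> 'next_eip'. When
   jmp_opt is set both edges use chained goto_tb exits; otherwise the
   branch simply ends the block. */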
2710#ifndef VBOX
2711static inline void gen_jcc(DisasContext *s, int b,
2712#else /* VBOX */
2713DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2714#endif /* VBOX */
2715 target_ulong val, target_ulong next_eip)
2716{
2717 int l1, l2, cc_op;
2718
2719 cc_op = s->cc_op;
2720 if (s->cc_op != CC_OP_DYNAMIC) {
2721 gen_op_set_cc_op(s->cc_op);
2722 s->cc_op = CC_OP_DYNAMIC;
2723 }
2724 if (s->jmp_opt) {
2725 l1 = gen_new_label();
2726 gen_jcc1(s, cc_op, b, l1);
2727
2728 gen_goto_tb(s, 0, next_eip);
2729
2730 gen_set_label(l1);
2731 gen_goto_tb(s, 1, val);
2732 s->is_jmp = 3;
2733 } else {
2734
2735 l1 = gen_new_label();
2736 l2 = gen_new_label();
2737 gen_jcc1(s, cc_op, b, l1);
2738
2739 gen_jmp_im(next_eip);
2740 tcg_gen_br(l2);
2741
2742 gen_set_label(l1);
2743 gen_jmp_im(val);
2744 gen_set_label(l2);
2745 gen_eob(s);
2746 }
2747}
2748
2749static void gen_setcc(DisasContext *s, int b)
2750{
2751 int inv, jcc_op, l1;
2752 TCGv t0;
2753
2754 if (is_fast_jcc_case(s, b)) {
2755 /* nominal case: we use a jump */
2756 /* XXX: make it faster by adding new instructions in TCG */
2757 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2758 tcg_gen_movi_tl(t0, 0);
2759 l1 = gen_new_label();
2760 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2761 tcg_gen_movi_tl(t0, 1);
2762 gen_set_label(l1);
2763 tcg_gen_mov_tl(cpu_T[0], t0);
2764 tcg_temp_free(t0);
2765 } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
2769 inv = b & 1;
2770 jcc_op = (b >> 1) & 7;
2771 gen_setcc_slow_T0(s, jcc_op);
2772 if (inv) {
2773 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2774 }
2775 }
2776}
2777
2778#ifndef VBOX
2779static inline void gen_op_movl_T0_seg(int seg_reg)
2780#else /* VBOX */
2781DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2782#endif /* VBOX */
2783{
2784 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2785 offsetof(CPUX86State,segs[seg_reg].selector));
2786}
2787
2788#ifndef VBOX
2789static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2790#else /* VBOX */
2791DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2792#endif /* VBOX */
2793{
2794 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2795 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2796 offsetof(CPUX86State,segs[seg_reg].selector));
2797 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2798 tcg_gen_st_tl(cpu_T[0], cpu_env,
2799 offsetof(CPUX86State,segs[seg_reg].base));
2800#ifdef VBOX
2801 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2802 if (seg_reg == R_CS)
2803 flags |= DESC_CS_MASK;
2804 gen_op_movl_T0_im(flags);
2805 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2806
2807 /* Set the limit to 0xffff. */
2808 gen_op_movl_T0_im(0xffff);
2809 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2810#endif
2811}
2812
2813/* move T0 to seg_reg and compute if the CPU state may change. Never
2814 call this function with seg_reg == R_CS */
2815static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2816{
2817 if (s->pe && !s->vm86) {
2818 /* XXX: optimize by finding processor state dynamically */
2819 if (s->cc_op != CC_OP_DYNAMIC)
2820 gen_op_set_cc_op(s->cc_op);
2821 gen_jmp_im(cur_eip);
2822 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2823 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2824 /* abort translation because the addseg value may change or
2825 because ss32 may change. For R_SS, translation must always
2826 stop as a special handling must be done to disable hardware
2827 interrupts for the next instruction */
2828 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2829 s->is_jmp = 3;
2830 } else {
2831 gen_op_movl_seg_T0_vm(seg_reg);
2832 if (seg_reg == R_SS)
2833 s->is_jmp = 3;
2834 }
2835}
2836
2837#ifndef VBOX
2838static inline int svm_is_rep(int prefixes)
2839#else /* VBOX */
2840DECLINLINE(int) svm_is_rep(int prefixes)
2841#endif /* VBOX */
2842{
2843 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2844}
2845
2846#ifndef VBOX
2847static inline void
2848#else /* VBOX */
2849DECLINLINE(void)
2850#endif /* VBOX */
2851gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2852 uint32_t type, uint64_t param)
2853{
    /* SVM not active: fast case */
2855 if (likely(!(s->flags & HF_SVMI_MASK)))
2856 return;
2857 if (s->cc_op != CC_OP_DYNAMIC)
2858 gen_op_set_cc_op(s->cc_op);
2859 gen_jmp_im(pc_start - s->cs_base);
2860 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2861 tcg_const_i32(type), tcg_const_i64(param));
2862}
2863
2864#ifndef VBOX
2865static inline void
2866#else /* VBOX */
2867DECLINLINE(void)
2868#endif
2869gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2870{
2871 gen_svm_check_intercept_param(s, pc_start, type, 0);
2872}
2873
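/* Add 'addend' to the stack pointer, using the address size implied by
   the current code and stack segment attributes. */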
2874#ifndef VBOX
2875static inline void gen_stack_update(DisasContext *s, int addend)
2876#else /* VBOX */
2877DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2878#endif /* VBOX */
2879{
2880#ifdef TARGET_X86_64
2881 if (CODE64(s)) {
2882 gen_op_add_reg_im(2, R_ESP, addend);
2883 } else
2884#endif
2885 if (s->ss32) {
2886 gen_op_add_reg_im(1, R_ESP, addend);
2887 } else {
2888 gen_op_add_reg_im(0, R_ESP, addend);
2889 }
2890}
2891
2892/* generate a push. It depends on ss32, addseg and dflag */
2893static void gen_push_T0(DisasContext *s)
2894{
2895#ifdef TARGET_X86_64
2896 if (CODE64(s)) {
2897 gen_op_movq_A0_reg(R_ESP);
2898 if (s->dflag) {
2899 gen_op_addq_A0_im(-8);
2900 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2901 } else {
2902 gen_op_addq_A0_im(-2);
2903 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2904 }
2905 gen_op_mov_reg_A0(2, R_ESP);
2906 } else
2907#endif
2908 {
2909 gen_op_movl_A0_reg(R_ESP);
2910 if (!s->dflag)
2911 gen_op_addl_A0_im(-2);
2912 else
2913 gen_op_addl_A0_im(-4);
2914 if (s->ss32) {
2915 if (s->addseg) {
2916 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2917 gen_op_addl_A0_seg(R_SS);
2918 }
2919 } else {
2920 gen_op_andl_A0_ffff();
2921 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2922 gen_op_addl_A0_seg(R_SS);
2923 }
2924 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2925 if (s->ss32 && !s->addseg)
2926 gen_op_mov_reg_A0(1, R_ESP);
2927 else
2928 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2929 }
2930}
2931
2932/* generate a push. It depends on ss32, addseg and dflag */
2933/* slower version for T1, only used for call Ev */
2934static void gen_push_T1(DisasContext *s)
2935{
2936#ifdef TARGET_X86_64
2937 if (CODE64(s)) {
2938 gen_op_movq_A0_reg(R_ESP);
2939 if (s->dflag) {
2940 gen_op_addq_A0_im(-8);
2941 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2942 } else {
2943 gen_op_addq_A0_im(-2);
            gen_op_st_T1_A0(OT_WORD + s->mem_index);
2945 }
2946 gen_op_mov_reg_A0(2, R_ESP);
2947 } else
2948#endif
2949 {
2950 gen_op_movl_A0_reg(R_ESP);
2951 if (!s->dflag)
2952 gen_op_addl_A0_im(-2);
2953 else
2954 gen_op_addl_A0_im(-4);
2955 if (s->ss32) {
2956 if (s->addseg) {
2957 gen_op_addl_A0_seg(R_SS);
2958 }
2959 } else {
2960 gen_op_andl_A0_ffff();
2961 gen_op_addl_A0_seg(R_SS);
2962 }
2963 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2964
2965 if (s->ss32 && !s->addseg)
2966 gen_op_mov_reg_A0(1, R_ESP);
2967 else
2968 gen_stack_update(s, (-2) << s->dflag);
2969 }
2970}
2971
2972/* two step pop is necessary for precise exceptions */
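/* The value is read while ESP is still unmodified; gen_pop_update() adds
   the adjustment afterwards, so a faulting load leaves ESP intact. */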
2973static void gen_pop_T0(DisasContext *s)
2974{
2975#ifdef TARGET_X86_64
2976 if (CODE64(s)) {
2977 gen_op_movq_A0_reg(R_ESP);
2978 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2979 } else
2980#endif
2981 {
2982 gen_op_movl_A0_reg(R_ESP);
2983 if (s->ss32) {
2984 if (s->addseg)
2985 gen_op_addl_A0_seg(R_SS);
2986 } else {
2987 gen_op_andl_A0_ffff();
2988 gen_op_addl_A0_seg(R_SS);
2989 }
2990 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2991 }
2992}
2993
2994static void gen_pop_update(DisasContext *s)
2995{
2996#ifdef TARGET_X86_64
2997 if (CODE64(s) && s->dflag) {
2998 gen_stack_update(s, 8);
2999 } else
3000#endif
3001 {
3002 gen_stack_update(s, 2 << s->dflag);
3003 }
3004}
3005
3006static void gen_stack_A0(DisasContext *s)
3007{
3008 gen_op_movl_A0_reg(R_ESP);
3009 if (!s->ss32)
3010 gen_op_andl_A0_ffff();
3011 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3012 if (s->addseg)
3013 gen_op_addl_A0_seg(R_SS);
3014}
3015
/* NOTE: wrap-around in 16-bit mode is not fully handled */
3017static void gen_pusha(DisasContext *s)
3018{
3019 int i;
3020 gen_op_movl_A0_reg(R_ESP);
3021 gen_op_addl_A0_im(-16 << s->dflag);
3022 if (!s->ss32)
3023 gen_op_andl_A0_ffff();
3024 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3025 if (s->addseg)
3026 gen_op_addl_A0_seg(R_SS);
3027 for(i = 0;i < 8; i++) {
3028 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3029 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3030 gen_op_addl_A0_im(2 << s->dflag);
3031 }
3032 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3033}
3034
/* NOTE: wrap-around in 16-bit mode is not fully handled */
3036static void gen_popa(DisasContext *s)
3037{
3038 int i;
3039 gen_op_movl_A0_reg(R_ESP);
3040 if (!s->ss32)
3041 gen_op_andl_A0_ffff();
3042 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3043 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3044 if (s->addseg)
3045 gen_op_addl_A0_seg(R_SS);
3046 for(i = 0;i < 8; i++) {
3047 /* ESP is not reloaded */
3048 if (i != 3) {
3049 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3050 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3051 }
3052 gen_op_addl_A0_im(2 << s->dflag);
3053 }
3054 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3055}
3056
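/* Generate ENTER: push (R/E)BP, copy 'level' outer frame pointers via a
   helper when nesting is requested, load the new frame pointer and
   reserve 'esp_addend' bytes of locals. */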
3057static void gen_enter(DisasContext *s, int esp_addend, int level)
3058{
3059 int ot, opsize;
3060
3061 level &= 0x1f;
3062#ifdef TARGET_X86_64
3063 if (CODE64(s)) {
3064 ot = s->dflag ? OT_QUAD : OT_WORD;
3065 opsize = 1 << ot;
3066
3067 gen_op_movl_A0_reg(R_ESP);
3068 gen_op_addq_A0_im(-opsize);
3069 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3070
3071 /* push bp */
3072 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3073 gen_op_st_T0_A0(ot + s->mem_index);
3074 if (level) {
3075 /* XXX: must save state */
3076 tcg_gen_helper_0_3(helper_enter64_level,
3077 tcg_const_i32(level),
3078 tcg_const_i32((ot == OT_QUAD)),
3079 cpu_T[1]);
3080 }
3081 gen_op_mov_reg_T1(ot, R_EBP);
3082 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3083 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3084 } else
3085#endif
3086 {
3087 ot = s->dflag + OT_WORD;
3088 opsize = 2 << s->dflag;
3089
3090 gen_op_movl_A0_reg(R_ESP);
3091 gen_op_addl_A0_im(-opsize);
3092 if (!s->ss32)
3093 gen_op_andl_A0_ffff();
3094 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3095 if (s->addseg)
3096 gen_op_addl_A0_seg(R_SS);
3097 /* push bp */
3098 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3099 gen_op_st_T0_A0(ot + s->mem_index);
3100 if (level) {
3101 /* XXX: must save state */
3102 tcg_gen_helper_0_3(helper_enter_level,
3103 tcg_const_i32(level),
3104 tcg_const_i32(s->dflag),
3105 cpu_T[1]);
3106 }
3107 gen_op_mov_reg_T1(ot, R_EBP);
3108 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3109 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3110 }
3111}
3112
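/* Raise exception 'trapno' at cur_eip; the condition codes and EIP are
   synchronised first so the exception is delivered with precise state. */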
3113static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3114{
3115 if (s->cc_op != CC_OP_DYNAMIC)
3116 gen_op_set_cc_op(s->cc_op);
3117 gen_jmp_im(cur_eip);
3118 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3119 s->is_jmp = 3;
3120}
3121
3122/* an interrupt is different from an exception because of the
3123 privilege checks */
3124static void gen_interrupt(DisasContext *s, int intno,
3125 target_ulong cur_eip, target_ulong next_eip)
3126{
3127 if (s->cc_op != CC_OP_DYNAMIC)
3128 gen_op_set_cc_op(s->cc_op);
3129 gen_jmp_im(cur_eip);
3130 tcg_gen_helper_0_2(helper_raise_interrupt,
3131 tcg_const_i32(intno),
3132 tcg_const_i32(next_eip - cur_eip));
3133 s->is_jmp = 3;
3134}
3135
3136static void gen_debug(DisasContext *s, target_ulong cur_eip)
3137{
3138 if (s->cc_op != CC_OP_DYNAMIC)
3139 gen_op_set_cc_op(s->cc_op);
3140 gen_jmp_im(cur_eip);
3141 tcg_gen_helper_0_0(helper_debug);
3142 s->is_jmp = 3;
3143}
3144
/* generate a generic end of block; a trace exception is also generated
   if needed */
3147static void gen_eob(DisasContext *s)
3148{
3149 if (s->cc_op != CC_OP_DYNAMIC)
3150 gen_op_set_cc_op(s->cc_op);
3151 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3152 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3153 }
3154
3155#ifdef VBOX
3156 gen_check_external_event(s);
3157#endif /* VBOX */
3158
3159 if (s->singlestep_enabled) {
3160 tcg_gen_helper_0_0(helper_debug);
3161 } else if (s->tf) {
3162 tcg_gen_helper_0_0(helper_single_step);
3163 } else {
3164 tcg_gen_exit_tb(0);
3165 }
3166 s->is_jmp = 3;
3167}
3168
/* generate a jump to eip. No segment change must happen before this, as
   a direct call to the next block may occur */
3171static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3172{
3173 if (s->jmp_opt) {
3174 if (s->cc_op != CC_OP_DYNAMIC) {
3175 gen_op_set_cc_op(s->cc_op);
3176 s->cc_op = CC_OP_DYNAMIC;
3177 }
3178 gen_goto_tb(s, tb_num, eip);
3179 s->is_jmp = 3;
3180 } else {
3181 gen_jmp_im(eip);
3182 gen_eob(s);
3183 }
3184}
3185
3186static void gen_jmp(DisasContext *s, target_ulong eip)
3187{
3188 gen_jmp_tb(s, eip, 0);
3189}
3190
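/* Helpers moving 64-bit ('q') and 128-bit ('o' for octa) values between
   guest memory addressed by A0 and a field of CPUX86State; 'idx' encodes
   the softmmu memory index. */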
3191#ifndef VBOX
3192static inline void gen_ldq_env_A0(int idx, int offset)
3193#else /* VBOX */
3194DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3195#endif /* VBOX */
3196{
3197 int mem_index = (idx >> 2) - 1;
3198 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3199 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3200}
3201
3202#ifndef VBOX
3203static inline void gen_stq_env_A0(int idx, int offset)
3204#else /* VBOX */
3205DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3206#endif /* VBOX */
3207{
3208 int mem_index = (idx >> 2) - 1;
3209 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3210 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3211}
3212
3213#ifndef VBOX
3214static inline void gen_ldo_env_A0(int idx, int offset)
3215#else /* VBOX */
3216DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3217#endif /* VBOX */
3218{
3219 int mem_index = (idx >> 2) - 1;
3220 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3221 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3222 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3223 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3224 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3225}
3226
3227#ifndef VBOX
3228static inline void gen_sto_env_A0(int idx, int offset)
3229#else /* VBOX */
3230DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3231#endif /* VBOX */
3232{
3233 int mem_index = (idx >> 2) - 1;
3234 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3235 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3236 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3237 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3238 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3239}
3240
3241#ifndef VBOX
3242static inline void gen_op_movo(int d_offset, int s_offset)
3243#else /* VBOX */
3244DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3245#endif /* VBOX */
3246{
3247 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3248 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3249 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3250 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3251}
3252
3253#ifndef VBOX
3254static inline void gen_op_movq(int d_offset, int s_offset)
3255#else /* VBOX */
3256DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3257#endif /* VBOX */
3258{
3259 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3260 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3261}
3262
3263#ifndef VBOX
3264static inline void gen_op_movl(int d_offset, int s_offset)
3265#else /* VBOX */
3266DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3267#endif /* VBOX */
3268{
3269 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3270 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3271}
3272
3273#ifndef VBOX
3274static inline void gen_op_movq_env_0(int d_offset)
3275#else /* VBOX */
3276DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3277#endif /* VBOX */
3278{
3279 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3280 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3281}
3282
3283#define SSE_SPECIAL ((void *)1)
3284#define SSE_DUMMY ((void *)2)
3285
3286#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3287#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3288 helper_ ## x ## ss, helper_ ## x ## sd, }
3289
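/* sse_op_table1 is indexed by the second opcode byte and the mandatory
   prefix (0: none, 1: 0x66, 2: 0xF3, 3: 0xF2). SSE_SPECIAL entries are
   decoded inline in gen_sse(), SSE_DUMMY marks femms/emms, and a NULL
   entry is an illegal prefix/opcode combination. */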
3290static void *sse_op_table1[256][4] = {
3291 /* 3DNow! extensions */
3292 [0x0e] = { SSE_DUMMY }, /* femms */
3293 [0x0f] = { SSE_DUMMY }, /* pf... */
3294 /* pure SSE operations */
3295 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3296 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3297 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3298 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3299 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3300 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3301 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3302 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3303
3304 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3305 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3306 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3307 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3308 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3309 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3310 [0x2e] = { helper_ucomiss, helper_ucomisd },
3311 [0x2f] = { helper_comiss, helper_comisd },
3312 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3313 [0x51] = SSE_FOP(sqrt),
3314 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3315 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3316 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3317 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3318 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3319 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3320 [0x58] = SSE_FOP(add),
3321 [0x59] = SSE_FOP(mul),
3322 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3323 helper_cvtss2sd, helper_cvtsd2ss },
3324 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3325 [0x5c] = SSE_FOP(sub),
3326 [0x5d] = SSE_FOP(min),
3327 [0x5e] = SSE_FOP(div),
3328 [0x5f] = SSE_FOP(max),
3329
3330 [0xc2] = SSE_FOP(cmpeq),
3331 [0xc6] = { helper_shufps, helper_shufpd },
3332
3333 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3334 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3335
3336 /* MMX ops and their SSE extensions */
3337 [0x60] = MMX_OP2(punpcklbw),
3338 [0x61] = MMX_OP2(punpcklwd),
3339 [0x62] = MMX_OP2(punpckldq),
3340 [0x63] = MMX_OP2(packsswb),
3341 [0x64] = MMX_OP2(pcmpgtb),
3342 [0x65] = MMX_OP2(pcmpgtw),
3343 [0x66] = MMX_OP2(pcmpgtl),
3344 [0x67] = MMX_OP2(packuswb),
3345 [0x68] = MMX_OP2(punpckhbw),
3346 [0x69] = MMX_OP2(punpckhwd),
3347 [0x6a] = MMX_OP2(punpckhdq),
3348 [0x6b] = MMX_OP2(packssdw),
3349 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3350 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3351 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3353 [0x70] = { helper_pshufw_mmx,
3354 helper_pshufd_xmm,
3355 helper_pshufhw_xmm,
3356 helper_pshuflw_xmm },
3357 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3358 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3359 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3360 [0x74] = MMX_OP2(pcmpeqb),
3361 [0x75] = MMX_OP2(pcmpeqw),
3362 [0x76] = MMX_OP2(pcmpeql),
3363 [0x77] = { SSE_DUMMY }, /* emms */
3364 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3365 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3367 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3368 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3369 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3370 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3371 [0xd1] = MMX_OP2(psrlw),
3372 [0xd2] = MMX_OP2(psrld),
3373 [0xd3] = MMX_OP2(psrlq),
3374 [0xd4] = MMX_OP2(paddq),
3375 [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
3377 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3378 [0xd8] = MMX_OP2(psubusb),
3379 [0xd9] = MMX_OP2(psubusw),
3380 [0xda] = MMX_OP2(pminub),
3381 [0xdb] = MMX_OP2(pand),
3382 [0xdc] = MMX_OP2(paddusb),
3383 [0xdd] = MMX_OP2(paddusw),
3384 [0xde] = MMX_OP2(pmaxub),
3385 [0xdf] = MMX_OP2(pandn),
3386 [0xe0] = MMX_OP2(pavgb),
3387 [0xe1] = MMX_OP2(psraw),
3388 [0xe2] = MMX_OP2(psrad),
3389 [0xe3] = MMX_OP2(pavgw),
3390 [0xe4] = MMX_OP2(pmulhuw),
3391 [0xe5] = MMX_OP2(pmulhw),
3392 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3394 [0xe8] = MMX_OP2(psubsb),
3395 [0xe9] = MMX_OP2(psubsw),
3396 [0xea] = MMX_OP2(pminsw),
3397 [0xeb] = MMX_OP2(por),
3398 [0xec] = MMX_OP2(paddsb),
3399 [0xed] = MMX_OP2(paddsw),
3400 [0xee] = MMX_OP2(pmaxsw),
3401 [0xef] = MMX_OP2(pxor),
3402 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3403 [0xf1] = MMX_OP2(psllw),
3404 [0xf2] = MMX_OP2(pslld),
3405 [0xf3] = MMX_OP2(psllq),
3406 [0xf4] = MMX_OP2(pmuludq),
3407 [0xf5] = MMX_OP2(pmaddwd),
3408 [0xf6] = MMX_OP2(psadbw),
3409 [0xf7] = MMX_OP2(maskmov),
3410 [0xf8] = MMX_OP2(psubb),
3411 [0xf9] = MMX_OP2(psubw),
3412 [0xfa] = MMX_OP2(psubl),
3413 [0xfb] = MMX_OP2(psubq),
3414 [0xfc] = MMX_OP2(paddb),
3415 [0xfd] = MMX_OP2(paddw),
3416 [0xfe] = MMX_OP2(paddl),
3417};
3418
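/* Immediate-count MMX/SSE shifts (opcodes 0x71-0x73): rows select the
   element size (word/dword/qword), columns the /r field of the ModRM
   byte; the psrldq/pslldq byte shifts exist for xmm operands only. */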
3419static void *sse_op_table2[3 * 8][2] = {
3420 [0 + 2] = MMX_OP2(psrlw),
3421 [0 + 4] = MMX_OP2(psraw),
3422 [0 + 6] = MMX_OP2(psllw),
3423 [8 + 2] = MMX_OP2(psrld),
3424 [8 + 4] = MMX_OP2(psrad),
3425 [8 + 6] = MMX_OP2(pslld),
3426 [16 + 2] = MMX_OP2(psrlq),
3427 [16 + 3] = { NULL, helper_psrldq_xmm },
3428 [16 + 6] = MMX_OP2(psllq),
3429 [16 + 7] = { NULL, helper_pslldq_xmm },
3430};
3431
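/* Scalar integer <-> float conversions in three groups of four
   (cvtsi2ss/sd, cvttss/sd2si, cvtss/sd2si), each indexed by the integer
   operand width; the 64-bit variants exist only on x86_64. */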
3432static void *sse_op_table3[4 * 3] = {
3433 helper_cvtsi2ss,
3434 helper_cvtsi2sd,
3435 X86_64_ONLY(helper_cvtsq2ss),
3436 X86_64_ONLY(helper_cvtsq2sd),
3437
3438 helper_cvttss2si,
3439 helper_cvttsd2si,
3440 X86_64_ONLY(helper_cvttss2sq),
3441 X86_64_ONLY(helper_cvttsd2sq),
3442
3443 helper_cvtss2si,
3444 helper_cvtsd2si,
3445 X86_64_ONLY(helper_cvtss2sq),
3446 X86_64_ONLY(helper_cvtsd2sq),
3447};
3448
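/* The eight comparison predicates selectable through the imm8 of
   cmpps/cmppd/cmpss/cmpsd (opcode 0xc2). */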
3449static void *sse_op_table4[8][4] = {
3450 SSE_FOP(cmpeq),
3451 SSE_FOP(cmplt),
3452 SSE_FOP(cmple),
3453 SSE_FOP(cmpunord),
3454 SSE_FOP(cmpneq),
3455 SSE_FOP(cmpnlt),
3456 SSE_FOP(cmpnle),
3457 SSE_FOP(cmpord),
3458};
3459
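/* 3DNow! operations, dispatched on the suffix byte of the 0F 0F
   encoding. */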
3460static void *sse_op_table5[256] = {
3461 [0x0c] = helper_pi2fw,
3462 [0x0d] = helper_pi2fd,
3463 [0x1c] = helper_pf2iw,
3464 [0x1d] = helper_pf2id,
3465 [0x8a] = helper_pfnacc,
3466 [0x8e] = helper_pfpnacc,
3467 [0x90] = helper_pfcmpge,
3468 [0x94] = helper_pfmin,
3469 [0x96] = helper_pfrcp,
3470 [0x97] = helper_pfrsqrt,
3471 [0x9a] = helper_pfsub,
3472 [0x9e] = helper_pfadd,
3473 [0xa0] = helper_pfcmpgt,
3474 [0xa4] = helper_pfmax,
3475 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3476 [0xa7] = helper_movq, /* pfrsqit1 */
3477 [0xaa] = helper_pfsubr,
3478 [0xae] = helper_pfacc,
3479 [0xb0] = helper_pfcmpeq,
3480 [0xb4] = helper_pfmul,
3481 [0xb6] = helper_movq, /* pfrcpit2 */
3482 [0xb7] = helper_pmulhrw_mmx,
3483 [0xbb] = helper_pswapd,
3484 [0xbf] = helper_pavgb_mmx /* pavgusb */
3485};
3486
3487struct sse_op_helper_s {
3488 void *op[2]; uint32_t ext_mask;
3489};
3490#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3491#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3492#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3493#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
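/* Three-byte opcode maps 0F 38 (sse_op_table6) and 0F 3A (sse_op_table7);
   each entry also carries the CPUID feature bit that must be present. */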
3494static struct sse_op_helper_s sse_op_table6[256] = {
3495 [0x00] = SSSE3_OP(pshufb),
3496 [0x01] = SSSE3_OP(phaddw),
3497 [0x02] = SSSE3_OP(phaddd),
3498 [0x03] = SSSE3_OP(phaddsw),
3499 [0x04] = SSSE3_OP(pmaddubsw),
3500 [0x05] = SSSE3_OP(phsubw),
3501 [0x06] = SSSE3_OP(phsubd),
3502 [0x07] = SSSE3_OP(phsubsw),
3503 [0x08] = SSSE3_OP(psignb),
3504 [0x09] = SSSE3_OP(psignw),
3505 [0x0a] = SSSE3_OP(psignd),
3506 [0x0b] = SSSE3_OP(pmulhrsw),
3507 [0x10] = SSE41_OP(pblendvb),
3508 [0x14] = SSE41_OP(blendvps),
3509 [0x15] = SSE41_OP(blendvpd),
3510 [0x17] = SSE41_OP(ptest),
3511 [0x1c] = SSSE3_OP(pabsb),
3512 [0x1d] = SSSE3_OP(pabsw),
3513 [0x1e] = SSSE3_OP(pabsd),
3514 [0x20] = SSE41_OP(pmovsxbw),
3515 [0x21] = SSE41_OP(pmovsxbd),
3516 [0x22] = SSE41_OP(pmovsxbq),
3517 [0x23] = SSE41_OP(pmovsxwd),
3518 [0x24] = SSE41_OP(pmovsxwq),
3519 [0x25] = SSE41_OP(pmovsxdq),
3520 [0x28] = SSE41_OP(pmuldq),
3521 [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntdqa */
3523 [0x2b] = SSE41_OP(packusdw),
3524 [0x30] = SSE41_OP(pmovzxbw),
3525 [0x31] = SSE41_OP(pmovzxbd),
3526 [0x32] = SSE41_OP(pmovzxbq),
3527 [0x33] = SSE41_OP(pmovzxwd),
3528 [0x34] = SSE41_OP(pmovzxwq),
3529 [0x35] = SSE41_OP(pmovzxdq),
3530 [0x37] = SSE42_OP(pcmpgtq),
3531 [0x38] = SSE41_OP(pminsb),
3532 [0x39] = SSE41_OP(pminsd),
3533 [0x3a] = SSE41_OP(pminuw),
3534 [0x3b] = SSE41_OP(pminud),
3535 [0x3c] = SSE41_OP(pmaxsb),
3536 [0x3d] = SSE41_OP(pmaxsd),
3537 [0x3e] = SSE41_OP(pmaxuw),
3538 [0x3f] = SSE41_OP(pmaxud),
3539 [0x40] = SSE41_OP(pmulld),
3540 [0x41] = SSE41_OP(phminposuw),
3541};
3542
3543static struct sse_op_helper_s sse_op_table7[256] = {
3544 [0x08] = SSE41_OP(roundps),
3545 [0x09] = SSE41_OP(roundpd),
3546 [0x0a] = SSE41_OP(roundss),
3547 [0x0b] = SSE41_OP(roundsd),
3548 [0x0c] = SSE41_OP(blendps),
3549 [0x0d] = SSE41_OP(blendpd),
3550 [0x0e] = SSE41_OP(pblendw),
3551 [0x0f] = SSSE3_OP(palignr),
3552 [0x14] = SSE41_SPECIAL, /* pextrb */
3553 [0x15] = SSE41_SPECIAL, /* pextrw */
3554 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3555 [0x17] = SSE41_SPECIAL, /* extractps */
3556 [0x20] = SSE41_SPECIAL, /* pinsrb */
3557 [0x21] = SSE41_SPECIAL, /* insertps */
3558 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3559 [0x40] = SSE41_OP(dpps),
3560 [0x41] = SSE41_OP(dppd),
3561 [0x42] = SSE41_OP(mpsadbw),
3562 [0x60] = SSE42_OP(pcmpestrm),
3563 [0x61] = SSE42_OP(pcmpestri),
3564 [0x62] = SSE42_OP(pcmpistrm),
3565 [0x63] = SSE42_OP(pcmpistri),
3566};
3567
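/* Decode and generate one MMX/SSE instruction with opcode 0F 'b'. b1 is
   derived from the mandatory prefix; #NM is raised while CR0.TS is set,
   and #UD for emulated FPU mode or missing CPUID features. */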
3568static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3569{
3570 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3571 int modrm, mod, rm, reg, reg_addr, offset_addr;
3572 void *sse_op2;
3573
3574 b &= 0xff;
3575 if (s->prefix & PREFIX_DATA)
3576 b1 = 1;
3577 else if (s->prefix & PREFIX_REPZ)
3578 b1 = 2;
3579 else if (s->prefix & PREFIX_REPNZ)
3580 b1 = 3;
3581 else
3582 b1 = 0;
3583 sse_op2 = sse_op_table1[b][b1];
3584 if (!sse_op2)
3585 goto illegal_op;
3586 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3587 is_xmm = 1;
3588 } else {
3589 if (b1 == 0) {
3590 /* MMX case */
3591 is_xmm = 0;
3592 } else {
3593 is_xmm = 1;
3594 }
3595 }
3596 /* simple MMX/SSE operation */
3597 if (s->flags & HF_TS_MASK) {
3598 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3599 return;
3600 }
3601 if (s->flags & HF_EM_MASK) {
3602 illegal_op:
3603 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3604 return;
3605 }
3606 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3607 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3608 goto illegal_op;
3609 if (b == 0x0e) {
3610 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3611 goto illegal_op;
3612 /* femms */
3613 tcg_gen_helper_0_0(helper_emms);
3614 return;
3615 }
3616 if (b == 0x77) {
3617 /* emms */
3618 tcg_gen_helper_0_0(helper_emms);
3619 return;
3620 }
3621 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3622 the static cpu state) */
3623 if (!is_xmm) {
3624 tcg_gen_helper_0_0(helper_enter_mmx);
3625 }
3626
3627 modrm = ldub_code(s->pc++);
3628 reg = ((modrm >> 3) & 7);
3629 if (is_xmm)
3630 reg |= rex_r;
3631 mod = (modrm >> 6) & 3;
3632 if (sse_op2 == SSE_SPECIAL) {
3633 b |= (b1 << 8);
3634 switch(b) {
3635 case 0x0e7: /* movntq */
3636 if (mod == 3)
3637 goto illegal_op;
3638 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3639 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3640 break;
3641 case 0x1e7: /* movntdq */
3642 case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
3644 if (mod == 3)
3645 goto illegal_op;
3646 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3647 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3648 break;
3649 case 0x3f0: /* lddqu */
3650 if (mod == 3)
3651 goto illegal_op;
3652 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3653 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3654 break;
3655 case 0x6e: /* movd mm, ea */
3656#ifdef TARGET_X86_64
3657 if (s->dflag == 2) {
3658 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3659 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3660 } else
3661#endif
3662 {
3663 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3664 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3665 offsetof(CPUX86State,fpregs[reg].mmx));
3666 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3667 }
3668 break;
3669 case 0x16e: /* movd xmm, ea */
3670#ifdef TARGET_X86_64
3671 if (s->dflag == 2) {
3672 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3673 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3674 offsetof(CPUX86State,xmm_regs[reg]));
3675 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3676 } else
3677#endif
3678 {
3679 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3680 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3681 offsetof(CPUX86State,xmm_regs[reg]));
3682 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3683 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3684 }
3685 break;
3686 case 0x6f: /* movq mm, ea */
3687 if (mod != 3) {
3688 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3689 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3690 } else {
3691 rm = (modrm & 7);
3692 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3693 offsetof(CPUX86State,fpregs[rm].mmx));
3694 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3695 offsetof(CPUX86State,fpregs[reg].mmx));
3696 }
3697 break;
3698 case 0x010: /* movups */
3699 case 0x110: /* movupd */
3700 case 0x028: /* movaps */
3701 case 0x128: /* movapd */
3702 case 0x16f: /* movdqa xmm, ea */
3703 case 0x26f: /* movdqu xmm, ea */
3704 if (mod != 3) {
3705 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3706 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3707 } else {
3708 rm = (modrm & 7) | REX_B(s);
3709 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3710 offsetof(CPUX86State,xmm_regs[rm]));
3711 }
3712 break;
3713 case 0x210: /* movss xmm, ea */
3714 if (mod != 3) {
3715 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3716 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3717 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3718 gen_op_movl_T0_0();
3719 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3720 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3721 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3722 } else {
3723 rm = (modrm & 7) | REX_B(s);
3724 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3725 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3726 }
3727 break;
3728 case 0x310: /* movsd xmm, ea */
3729 if (mod != 3) {
3730 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3731 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3732 gen_op_movl_T0_0();
3733 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3734 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3735 } else {
3736 rm = (modrm & 7) | REX_B(s);
3737 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3738 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3739 }
3740 break;
3741 case 0x012: /* movlps */
3742 case 0x112: /* movlpd */
3743 if (mod != 3) {
3744 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3745 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3746 } else {
3747 /* movhlps */
3748 rm = (modrm & 7) | REX_B(s);
3749 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3750 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3751 }
3752 break;
3753 case 0x212: /* movsldup */
3754 if (mod != 3) {
3755 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3756 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3757 } else {
3758 rm = (modrm & 7) | REX_B(s);
3759 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3760 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3761 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3762 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3763 }
3764 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3765 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3766 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3767 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3768 break;
3769 case 0x312: /* movddup */
3770 if (mod != 3) {
3771 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3772 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3773 } else {
3774 rm = (modrm & 7) | REX_B(s);
3775 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3776 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3777 }
3778 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3779 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3780 break;
3781 case 0x016: /* movhps */
3782 case 0x116: /* movhpd */
3783 if (mod != 3) {
3784 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3785 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3786 } else {
3787 /* movlhps */
3788 rm = (modrm & 7) | REX_B(s);
3789 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3790 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3791 }
3792 break;
3793 case 0x216: /* movshdup */
3794 if (mod != 3) {
3795 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3796 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3797 } else {
3798 rm = (modrm & 7) | REX_B(s);
3799 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3800 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3801 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3802 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3803 }
3804 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3805 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3806 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3807 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3808 break;
3809 case 0x7e: /* movd ea, mm */
3810#ifdef TARGET_X86_64
3811 if (s->dflag == 2) {
3812 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3813 offsetof(CPUX86State,fpregs[reg].mmx));
3814 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3815 } else
3816#endif
3817 {
3818 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3819 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3820 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3821 }
3822 break;
3823 case 0x17e: /* movd ea, xmm */
3824#ifdef TARGET_X86_64
3825 if (s->dflag == 2) {
3826 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3827 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3828 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3829 } else
3830#endif
3831 {
3832 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3833 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3834 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3835 }
3836 break;
3837 case 0x27e: /* movq xmm, ea */
3838 if (mod != 3) {
3839 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3840 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3841 } else {
3842 rm = (modrm & 7) | REX_B(s);
3843 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3844 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3845 }
3846 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3847 break;
3848 case 0x7f: /* movq ea, mm */
3849 if (mod != 3) {
3850 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3851 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3852 } else {
3853 rm = (modrm & 7);
3854 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3855 offsetof(CPUX86State,fpregs[reg].mmx));
3856 }
3857 break;
3858 case 0x011: /* movups */
3859 case 0x111: /* movupd */
3860 case 0x029: /* movaps */
3861 case 0x129: /* movapd */
3862 case 0x17f: /* movdqa ea, xmm */
3863 case 0x27f: /* movdqu ea, xmm */
3864 if (mod != 3) {
3865 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3866 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3867 } else {
3868 rm = (modrm & 7) | REX_B(s);
3869 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3870 offsetof(CPUX86State,xmm_regs[reg]));
3871 }
3872 break;
3873 case 0x211: /* movss ea, xmm */
3874 if (mod != 3) {
3875 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3876 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3877 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3878 } else {
3879 rm = (modrm & 7) | REX_B(s);
3880 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3881 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3882 }
3883 break;
3884 case 0x311: /* movsd ea, xmm */
3885 if (mod != 3) {
3886 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3887 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3888 } else {
3889 rm = (modrm & 7) | REX_B(s);
3890 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3891 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3892 }
3893 break;
3894 case 0x013: /* movlps */
3895 case 0x113: /* movlpd */
3896 if (mod != 3) {
3897 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3898 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3899 } else {
3900 goto illegal_op;
3901 }
3902 break;
3903 case 0x017: /* movhps */
3904 case 0x117: /* movhpd */
3905 if (mod != 3) {
3906 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3907 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3908 } else {
3909 goto illegal_op;
3910 }
3911 break;
3912 case 0x71: /* shift mm, im */
3913 case 0x72:
3914 case 0x73:
3915 case 0x171: /* shift xmm, im */
3916 case 0x172:
3917 case 0x173:
3918 val = ldub_code(s->pc++);
3919 if (is_xmm) {
3920 gen_op_movl_T0_im(val);
3921 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3922 gen_op_movl_T0_0();
3923 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3924 op1_offset = offsetof(CPUX86State,xmm_t0);
3925 } else {
3926 gen_op_movl_T0_im(val);
3927 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3928 gen_op_movl_T0_0();
3929 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3930 op1_offset = offsetof(CPUX86State,mmx_t0);
3931 }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3933 if (!sse_op2)
3934 goto illegal_op;
3935 if (is_xmm) {
3936 rm = (modrm & 7) | REX_B(s);
3937 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3938 } else {
3939 rm = (modrm & 7);
3940 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3941 }
3942 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3943 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3944 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3945 break;
3946 case 0x050: /* movmskps */
3947 rm = (modrm & 7) | REX_B(s);
3948 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3949 offsetof(CPUX86State,xmm_regs[rm]));
3950 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3951 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3952 gen_op_mov_reg_T0(OT_LONG, reg);
3953 break;
3954 case 0x150: /* movmskpd */
3955 rm = (modrm & 7) | REX_B(s);
3956 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3957 offsetof(CPUX86State,xmm_regs[rm]));
3958 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3959 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3960 gen_op_mov_reg_T0(OT_LONG, reg);
3961 break;
3962 case 0x02a: /* cvtpi2ps */
3963 case 0x12a: /* cvtpi2pd */
3964 tcg_gen_helper_0_0(helper_enter_mmx);
3965 if (mod != 3) {
3966 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3967 op2_offset = offsetof(CPUX86State,mmx_t0);
3968 gen_ldq_env_A0(s->mem_index, op2_offset);
3969 } else {
3970 rm = (modrm & 7);
3971 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3972 }
3973 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3974 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3975 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3976 switch(b >> 8) {
3977 case 0x0:
3978 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3979 break;
3980 default:
3981 case 0x1:
3982 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3983 break;
3984 }
3985 break;
3986 case 0x22a: /* cvtsi2ss */
3987 case 0x32a: /* cvtsi2sd */
3988 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3989 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3990 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3991 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3992 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3993 if (ot == OT_LONG) {
3994 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3995 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3996 } else {
3997 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3998 }
3999 break;
4000 case 0x02c: /* cvttps2pi */
4001 case 0x12c: /* cvttpd2pi */
4002 case 0x02d: /* cvtps2pi */
4003 case 0x12d: /* cvtpd2pi */
4004 tcg_gen_helper_0_0(helper_enter_mmx);
4005 if (mod != 3) {
4006 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4007 op2_offset = offsetof(CPUX86State,xmm_t0);
4008 gen_ldo_env_A0(s->mem_index, op2_offset);
4009 } else {
4010 rm = (modrm & 7) | REX_B(s);
4011 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4012 }
4013 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
4014 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4015 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4016 switch(b) {
4017 case 0x02c:
4018 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
4019 break;
4020 case 0x12c:
4021 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
4022 break;
4023 case 0x02d:
4024 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
4025 break;
4026 case 0x12d:
4027 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4028 break;
4029 }
4030 break;
4031 case 0x22c: /* cvttss2si */
4032 case 0x32c: /* cvttsd2si */
4033 case 0x22d: /* cvtss2si */
4034 case 0x32d: /* cvtsd2si */
4035 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4036 if (mod != 3) {
4037 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4038 if ((b >> 8) & 1) {
4039 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4040 } else {
4041 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4042 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4043 }
4044 op2_offset = offsetof(CPUX86State,xmm_t0);
4045 } else {
4046 rm = (modrm & 7) | REX_B(s);
4047 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4048 }
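            /* the index arithmetic below implies the layout of
               sse_op_table3: rows 4-7 are the truncating cvtt*2si forms,
               rows 8-11 the rounding cvt*2si forms ((b & 1) * 4);
               (b >> 8) - 2 picks the ss vs sd source and
               (s->dflag == 2) * 2 the integer width */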
4049 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4050 (b & 1) * 4];
4051 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4052 if (ot == OT_LONG) {
4053 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4054 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4055 } else {
4056 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4057 }
4058 gen_op_mov_reg_T0(ot, reg);
4059 break;
4060 case 0xc4: /* pinsrw */
4061 case 0x1c4:
4062 s->rip_offset = 1;
4063 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4064 val = ldub_code(s->pc++);
4065 if (b1) {
4066 val &= 7;
4067 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4068 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4069 } else {
4070 val &= 3;
4071 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4072 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4073 }
4074 break;
4075 case 0xc5: /* pextrw */
4076 case 0x1c5:
4077 if (mod != 3)
4078 goto illegal_op;
4079 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4080 val = ldub_code(s->pc++);
4081 if (b1) {
4082 val &= 7;
4083 rm = (modrm & 7) | REX_B(s);
4084 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4085 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4086 } else {
4087 val &= 3;
4088 rm = (modrm & 7);
4089 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4090 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4091 }
4092 reg = ((modrm >> 3) & 7) | rex_r;
4093 gen_op_mov_reg_T0(ot, reg);
4094 break;
4095 case 0x1d6: /* movq ea, xmm */
4096 if (mod != 3) {
4097 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4098 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4099 } else {
4100 rm = (modrm & 7) | REX_B(s);
4101 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4102 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4103 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4104 }
4105 break;
4106 case 0x2d6: /* movq2dq */
4107 tcg_gen_helper_0_0(helper_enter_mmx);
4108 rm = (modrm & 7);
4109 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4110 offsetof(CPUX86State,fpregs[rm].mmx));
4111 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4112 break;
4113 case 0x3d6: /* movdq2q */
4114 tcg_gen_helper_0_0(helper_enter_mmx);
4115 rm = (modrm & 7) | REX_B(s);
4116 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4117 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4118 break;
4119 case 0xd7: /* pmovmskb */
4120 case 0x1d7:
4121 if (mod != 3)
4122 goto illegal_op;
4123 if (b1) {
4124 rm = (modrm & 7) | REX_B(s);
4125 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4126 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4127 } else {
4128 rm = (modrm & 7);
4129 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4130 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4131 }
4132 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4133 reg = ((modrm >> 3) & 7) | rex_r;
4134 gen_op_mov_reg_T0(OT_LONG, reg);
4135 break;
4136 case 0x138:
4137 if (s->prefix & PREFIX_REPNZ)
4138 goto crc32;
4139 case 0x038:
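        /* the byte fetched as ModRM earlier is really the third opcode
           byte (0f 38 xx); re-fetch the real ModRM below */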
4140 b = modrm;
4141 modrm = ldub_code(s->pc++);
4142 rm = modrm & 7;
4143 reg = ((modrm >> 3) & 7) | rex_r;
4144 mod = (modrm >> 6) & 3;
4145
4146 sse_op2 = sse_op_table6[b].op[b1];
4147 if (!sse_op2)
4148 goto illegal_op;
4149 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4150 goto illegal_op;
4151
4152 if (b1) {
4153 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4154 if (mod == 3) {
4155 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4156 } else {
4157 op2_offset = offsetof(CPUX86State,xmm_t0);
4158 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
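                /* pmovsx/pmovzx widen their source, so only the low
                   64/32/16 bits are fetched from memory */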
4159 switch (b) {
4160 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4161 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4162 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4163 gen_ldq_env_A0(s->mem_index, op2_offset +
4164 offsetof(XMMReg, XMM_Q(0)));
4165 break;
4166 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4167 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4168 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
4169 (s->mem_index >> 2) - 1);
4170 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4171 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4172 offsetof(XMMReg, XMM_L(0)));
4173 break;
4174 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4175 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4176 (s->mem_index >> 2) - 1);
4177 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4178 offsetof(XMMReg, XMM_W(0)));
4179 break;
4180 case 0x2a: /* movntdqa */
4181 gen_ldo_env_A0(s->mem_index, op1_offset);
4182 return;
4183 default:
4184 gen_ldo_env_A0(s->mem_index, op2_offset);
4185 }
4186 }
4187 } else {
4188 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4189 if (mod == 3) {
4190 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4191 } else {
4192 op2_offset = offsetof(CPUX86State,mmx_t0);
4193 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4194 gen_ldq_env_A0(s->mem_index, op2_offset);
4195 }
4196 }
4197 if (sse_op2 == SSE_SPECIAL)
4198 goto illegal_op;
4199
4200 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4201 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4202 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4203
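        /* only 0f 38 17 (ptest) sets EFLAGS here */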
4204 if (b == 0x17)
4205 s->cc_op = CC_OP_EFLAGS;
4206 break;
4207 case 0x338: /* crc32 */
4208 crc32:
4209 b = modrm;
4210 modrm = ldub_code(s->pc++);
4211 reg = ((modrm >> 3) & 7) | rex_r;
4212
4213 if (b != 0xf0 && b != 0xf1)
4214 goto illegal_op;
4215 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4216 goto illegal_op;
4217
4218 if (b == 0xf0)
4219 ot = OT_BYTE;
4220 else if (b == 0xf1 && s->dflag != 2)
4221 if (s->prefix & PREFIX_DATA)
4222 ot = OT_WORD;
4223 else
4224 ot = OT_LONG;
4225 else
4226 ot = OT_QUAD;
4227
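        /* 0xf0 is the byte-source form; 0xf1 takes a 16/32/64-bit source
           depending on the 0x66 prefix and REX.W; the helper receives the
           source width in bits (8 << ot) */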
4228 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4229 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4230 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4231 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4232 cpu_T[0], tcg_const_i32(8 << ot));
4233
4234 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4235 gen_op_mov_reg_T0(ot, reg);
4236 break;
4237 case 0x03a:
4238 case 0x13a:
4239 b = modrm;
4240 modrm = ldub_code(s->pc++);
4241 rm = modrm & 7;
4242 reg = ((modrm >> 3) & 7) | rex_r;
4243 mod = (modrm >> 6) & 3;
4244
4245 sse_op2 = sse_op_table7[b].op[b1];
4246 if (!sse_op2)
4247 goto illegal_op;
4248 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4249 goto illegal_op;
4250
4251 if (sse_op2 == SSE_SPECIAL) {
4252 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4253 rm = (modrm & 7) | REX_B(s);
4254 if (mod != 3)
4255 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4256 reg = ((modrm >> 3) & 7) | rex_r;
4257 val = ldub_code(s->pc++);
4258 switch (b) {
4259 case 0x14: /* pextrb */
4260 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4261 xmm_regs[reg].XMM_B(val & 15)));
4262 if (mod == 3)
4263 gen_op_mov_reg_T0(ot, rm);
4264 else
4265 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4266 (s->mem_index >> 2) - 1);
4267 break;
4268 case 0x15: /* pextrw */
4269 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4270 xmm_regs[reg].XMM_W(val & 7)));
4271 if (mod == 3)
4272 gen_op_mov_reg_T0(ot, rm);
4273 else
4274 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4275 (s->mem_index >> 2) - 1);
4276 break;
4277 case 0x16:
4278 if (ot == OT_LONG) { /* pextrd */
4279 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4280 offsetof(CPUX86State,
4281 xmm_regs[reg].XMM_L(val & 3)));
4282 if (mod == 3)
4283 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4284 else
4285 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4286 (s->mem_index >> 2) - 1);
4287 } else { /* pextrq */
4288 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4289 offsetof(CPUX86State,
4290 xmm_regs[reg].XMM_Q(val & 1)));
4291 if (mod == 3)
4292 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4293 else
4294 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4295 (s->mem_index >> 2) - 1);
4296 }
4297 break;
4298 case 0x17: /* extractps */
4299 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4300 xmm_regs[reg].XMM_L(val & 3)));
4301 if (mod == 3)
4302 gen_op_mov_reg_T0(ot, rm);
4303 else
4304 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4305 (s->mem_index >> 2) - 1);
4306 break;
4307 case 0x20: /* pinsrb */
4308 if (mod == 3)
4309 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4310 else
4311 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4312 (s->mem_index >> 2) - 1);
4313 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4314 xmm_regs[reg].XMM_B(val & 15)));
4315 break;
4316 case 0x21: /* insertps */
4317 if (mod == 3)
4318 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4319 offsetof(CPUX86State,xmm_regs[rm]
4320 .XMM_L((val >> 6) & 3)));
4321 else
4322 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4323 (s->mem_index >> 2) - 1);
4324 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4325 offsetof(CPUX86State,xmm_regs[reg]
4326 .XMM_L((val >> 4) & 3)));
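                /* bits 3:0 of the immediate form the zmask: each set bit
                   zeroes the corresponding dword of the destination */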
4327 if ((val >> 0) & 1)
4328 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4329 cpu_env, offsetof(CPUX86State,
4330 xmm_regs[reg].XMM_L(0)));
4331 if ((val >> 1) & 1)
4332 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4333 cpu_env, offsetof(CPUX86State,
4334 xmm_regs[reg].XMM_L(1)));
4335 if ((val >> 2) & 1)
4336 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4337 cpu_env, offsetof(CPUX86State,
4338 xmm_regs[reg].XMM_L(2)));
4339 if ((val >> 3) & 1)
4340 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4341 cpu_env, offsetof(CPUX86State,
4342 xmm_regs[reg].XMM_L(3)));
4343 break;
4344 case 0x22:
4345 if (ot == OT_LONG) { /* pinsrd */
4346 if (mod == 3)
4347 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4348 else
4349 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4350 (s->mem_index >> 2) - 1);
4351 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4352 offsetof(CPUX86State,
4353 xmm_regs[reg].XMM_L(val & 3)));
4354 } else { /* pinsrq */
4355 if (mod == 3)
4356 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4357 else
4358 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4359 (s->mem_index >> 2) - 1);
4360 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4361 offsetof(CPUX86State,
4362 xmm_regs[reg].XMM_Q(val & 1)));
4363 }
4364 break;
4365 }
4366 return;
4367 }
4368
4369 if (b1) {
4370 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4371 if (mod == 3) {
4372 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4373 } else {
4374 op2_offset = offsetof(CPUX86State,xmm_t0);
4375 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4376 gen_ldo_env_A0(s->mem_index, op2_offset);
4377 }
4378 } else {
4379 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4380 if (mod == 3) {
4381 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4382 } else {
4383 op2_offset = offsetof(CPUX86State,mmx_t0);
4384 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4385 gen_ldq_env_A0(s->mem_index, op2_offset);
4386 }
4387 }
4388 val = ldub_code(s->pc++);
4389
4390 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4391 s->cc_op = CC_OP_EFLAGS;
4392
4393 if (s->dflag == 2)
4394 /* The helper must use entire 64-bit gp registers */
4395 val |= 1 << 8;
4396 }
4397
4398 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4399 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4400 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4401 break;
4402 default:
4403 goto illegal_op;
4404 }
4405 } else {
4406 /* generic MMX or SSE operation */
4407 switch(b) {
4408 case 0x70: /* pshufx insn */
4409 case 0xc6: /* shufps/shufpd */
4410 case 0xc2: /* compare insns */
4411 s->rip_offset = 1;
4412 break;
4413 default:
4414 break;
4415 }
4416 if (is_xmm) {
4417 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4418 if (mod != 3) {
4419 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4420 op2_offset = offsetof(CPUX86State,xmm_t0);
4421 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4422 b == 0xc2)) {
4423 /* special case for scalar SSE insns: load only the low 32/64 bits */
4424 if (b1 == 2) {
4425 /* 32 bit access */
4426 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4427 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4428 } else {
4429 /* 64 bit access */
4430 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4431 }
4432 } else {
4433 gen_ldo_env_A0(s->mem_index, op2_offset);
4434 }
4435 } else {
4436 rm = (modrm & 7) | REX_B(s);
4437 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4438 }
4439 } else {
4440 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4441 if (mod != 3) {
4442 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4443 op2_offset = offsetof(CPUX86State,mmx_t0);
4444 gen_ldq_env_A0(s->mem_index, op2_offset);
4445 } else {
4446 rm = (modrm & 7);
4447 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4448 }
4449 }
4450 switch(b) {
4451 case 0x0f: /* 3DNow! data insns */
4452 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4453 goto illegal_op;
4454 val = ldub_code(s->pc++);
4455 sse_op2 = sse_op_table5[val];
4456 if (!sse_op2)
4457 goto illegal_op;
4458 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4459 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4460 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4461 break;
4462 case 0x70: /* pshufx insn */
4463 case 0xc6: /* shufps/shufpd */
4464 val = ldub_code(s->pc++);
4465 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4466 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4467 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4468 break;
4469 case 0xc2:
4470 /* compare insns */
4471 val = ldub_code(s->pc++);
4472 if (val >= 8)
4473 goto illegal_op;
4474 sse_op2 = sse_op_table4[val][b1];
4475 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4476 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4477 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4478 break;
4479 case 0xf7:
4480 /* maskmovq/maskmovdqu: store via the implicit DS:rDI pointer, so prepare A0 */
4481 if (mod != 3)
4482 goto illegal_op;
4483#ifdef TARGET_X86_64
4484 if (s->aflag == 2) {
4485 gen_op_movq_A0_reg(R_EDI);
4486 } else
4487#endif
4488 {
4489 gen_op_movl_A0_reg(R_EDI);
4490 if (s->aflag == 0)
4491 gen_op_andl_A0_ffff();
4492 }
4493 gen_add_A0_ds_seg(s);
4494
4495 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4496 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4497 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4498 break;
4499 default:
4500 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4501 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4502 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4503 break;
4504 }
4505 if (b == 0x2e || b == 0x2f) {
4506 s->cc_op = CC_OP_EFLAGS;
4507 }
4508 }
4509}
4510
4511#ifdef VBOX
4512/* Checks if it's an invalid lock sequence. Only a few instructions
4513 can be used together with the lock prefix, and of those only the
4514 forms that write to a memory operand. So, this is kind of annoying
4515 work to do...
4516 The AMD manual lists the following instructions.
4517 ADC
4518 ADD
4519 AND
4520 BTC
4521 BTR
4522 BTS
4523 CMPXCHG
4524 CMPXCHG8B
4525 CMPXCHG16B
4526 DEC
4527 INC
4528 NEG
4529 NOT
4530 OR
4531 SBB
4532 SUB
4533 XADD
4534 XCHG
4535 XOR */
4536static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4537{
4538 target_ulong pc = s->pc;
4539 int modrm, mod, op;
4540
4541 /* X={8,16,32,64} Y={16,32,64} */
4542 switch (b)
4543 {
4544 /* /2: ADC reg/memX, immX */
4545 /* /0: ADD reg/memX, immX */
4546 /* /4: AND reg/memX, immX */
4547 /* /1: OR reg/memX, immX */
4548 /* /3: SBB reg/memX, immX */
4549 /* /5: SUB reg/memX, immX */
4550 /* /6: XOR reg/memX, immX */
4551 case 0x80:
4552 case 0x81:
4553 case 0x83:
4554 modrm = ldub_code(pc++);
4555 op = (modrm >> 3) & 7;
4556 if (op == 7) /* /7: CMP */
4557 break;
4558 mod = (modrm >> 6) & 3;
4559 if (mod == 3) /* register destination */
4560 break;
4561 return false;
4562
4563 case 0x10: /* /r: ADC reg/mem8, reg8 */
4564 case 0x11: /* /r: ADC reg/memY, regY */
4565 case 0x00: /* /r: ADD reg/mem8, reg8 */
4566 case 0x01: /* /r: ADD reg/memY, regY */
4567 case 0x20: /* /r: AND reg/mem8, reg8 */
4568 case 0x21: /* /r: AND reg/memY, regY */
4569 case 0x08: /* /r: OR reg/mem8, reg8 */
4570 case 0x09: /* /r: OR reg/memY, regY */
4571 case 0x18: /* /r: SBB reg/mem8, reg8 */
4572 case 0x19: /* /r: SBB reg/memY, regY */
4573 case 0x28: /* /r: SUB reg/mem8, reg8 */
4574 case 0x29: /* /r: SUB reg/memY, regY */
4575 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4576 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4577 case 0x30: /* /r: XOR reg/mem8, reg8 */
4578 case 0x31: /* /r: XOR reg/memY, regY */
4579 modrm = ldub_code(pc++);
4580 mod = (modrm >> 6) & 3;
4581 if (mod == 3) /* register destination */
4582 break;
4583 return false;
4584
4585 /* /1: DEC reg/memX */
4586 /* /0: INC reg/memX */
4587 case 0xfe:
4588 case 0xff:
4589 modrm = ldub_code(pc++);
4590 mod = (modrm >> 6) & 3;
4591 if (mod == 3) /* register destination */
4592 break;
4593 return false;
4594
4595 /* /3: NEG reg/memX */
4596 /* /2: NOT reg/memX */
4597 case 0xf6:
4598 case 0xf7:
4599 modrm = ldub_code(pc++);
4600 mod = (modrm >> 6) & 3;
4601 if (mod == 3) /* register destination */
4602 break;
4603 return false;
4604
4605 case 0x0f:
4606 b = ldub_code(pc++);
4607 switch (b)
4608 {
4609 /* /7: BTC reg/memY, imm8 */
4610 /* /6: BTR reg/memY, imm8 */
4611 /* /5: BTS reg/memY, imm8 */
4612 case 0xba:
4613 modrm = ldub_code(pc++);
4614 op = (modrm >> 3) & 7;
4615 if (op < 5)
4616 break;
4617 mod = (modrm >> 6) & 3;
4618 if (mod == 3) /* register destination */
4619 break;
4620 return false;
4621
4622 case 0xbb: /* /r: BTC reg/memY, regY */
4623 case 0xb3: /* /r: BTR reg/memY, regY */
4624 case 0xab: /* /r: BTS reg/memY, regY */
4625 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4626 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4627 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4628 case 0xc1: /* /r: XADD reg/memY, regY */
4629 modrm = ldub_code(pc++);
4630 mod = (modrm >> 6) & 3;
4631 if (mod == 3) /* register destination */
4632 break;
4633 return false;
4634
4635 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4636 case 0xc7:
4637 modrm = ldub_code(pc++);
4638 op = (modrm >> 3) & 7;
4639 if (op != 1)
4640 break;
4641 return false;
4642 }
4643 break;
4644 }
4645
4646 /* illegal sequence. The s->pc is past the lock prefix and that
4647 is sufficient for the TB, I think. */
4648 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4649 return true;
4650}
4651#endif /* VBOX */
4652
4653
4654/* convert one instruction. s->is_jmp is set if the translation must
4655 be stopped. Return the next pc value */
4656static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4657{
4658 int b, prefixes, aflag, dflag;
4659 int shift, ot;
4660 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4661 target_ulong next_eip, tval;
4662 int rex_w, rex_r;
4663
4664 if (unlikely(loglevel & CPU_LOG_TB_OP))
4665 tcg_gen_debug_insn_start(pc_start);
4666
4667 s->pc = pc_start;
4668 prefixes = 0;
4669 aflag = s->code32;
4670 dflag = s->code32;
4671 s->override = -1;
4672 rex_w = -1;
4673 rex_r = 0;
4674#ifdef TARGET_X86_64
4675 s->rex_x = 0;
4676 s->rex_b = 0;
4677 x86_64_hregs = 0;
4678#endif
4679 s->rip_offset = 0; /* for relative ip address */
4680#ifdef VBOX
4681 /* nike: seems to only slow things down */
4682# if 0
4683 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4684
4685 gen_update_eip(pc_start - s->cs_base);
4686# endif
4687#endif
4688
4689 next_byte:
4690 b = ldub_code(s->pc);
4691 s->pc++;
4692 /* check prefixes */
4693#ifdef TARGET_X86_64
4694 if (CODE64(s)) {
4695 switch (b) {
4696 case 0xf3:
4697 prefixes |= PREFIX_REPZ;
4698 goto next_byte;
4699 case 0xf2:
4700 prefixes |= PREFIX_REPNZ;
4701 goto next_byte;
4702 case 0xf0:
4703 prefixes |= PREFIX_LOCK;
4704 goto next_byte;
4705 case 0x2e:
4706 s->override = R_CS;
4707 goto next_byte;
4708 case 0x36:
4709 s->override = R_SS;
4710 goto next_byte;
4711 case 0x3e:
4712 s->override = R_DS;
4713 goto next_byte;
4714 case 0x26:
4715 s->override = R_ES;
4716 goto next_byte;
4717 case 0x64:
4718 s->override = R_FS;
4719 goto next_byte;
4720 case 0x65:
4721 s->override = R_GS;
4722 goto next_byte;
4723 case 0x66:
4724 prefixes |= PREFIX_DATA;
4725 goto next_byte;
4726 case 0x67:
4727 prefixes |= PREFIX_ADR;
4728 goto next_byte;
4729 case 0x40 ... 0x4f:
4730 /* REX prefix */
4731 rex_w = (b >> 3) & 1;
4732 rex_r = (b & 0x4) << 1;
4733 s->rex_x = (b & 0x2) << 2;
4734 REX_B(s) = (b & 0x1) << 3;
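            /* R, X and B are kept pre-shifted to bit 3 so they can be
               OR'ed directly into the 3-bit ModRM register fields */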
4735 x86_64_hregs = 1; /* select uniform byte register addressing */
4736 goto next_byte;
4737 }
4738 if (rex_w == 1) {
4739 /* 0x66 is ignored if rex.w is set */
4740 dflag = 2;
4741 } else {
4742 if (prefixes & PREFIX_DATA)
4743 dflag ^= 1;
4744 }
4745 if (!(prefixes & PREFIX_ADR))
4746 aflag = 2;
4747 } else
4748#endif
4749 {
4750 switch (b) {
4751 case 0xf3:
4752 prefixes |= PREFIX_REPZ;
4753 goto next_byte;
4754 case 0xf2:
4755 prefixes |= PREFIX_REPNZ;
4756 goto next_byte;
4757 case 0xf0:
4758 prefixes |= PREFIX_LOCK;
4759 goto next_byte;
4760 case 0x2e:
4761 s->override = R_CS;
4762 goto next_byte;
4763 case 0x36:
4764 s->override = R_SS;
4765 goto next_byte;
4766 case 0x3e:
4767 s->override = R_DS;
4768 goto next_byte;
4769 case 0x26:
4770 s->override = R_ES;
4771 goto next_byte;
4772 case 0x64:
4773 s->override = R_FS;
4774 goto next_byte;
4775 case 0x65:
4776 s->override = R_GS;
4777 goto next_byte;
4778 case 0x66:
4779 prefixes |= PREFIX_DATA;
4780 goto next_byte;
4781 case 0x67:
4782 prefixes |= PREFIX_ADR;
4783 goto next_byte;
4784 }
4785 if (prefixes & PREFIX_DATA)
4786 dflag ^= 1;
4787 if (prefixes & PREFIX_ADR)
4788 aflag ^= 1;
4789 }
4790
4791 s->prefix = prefixes;
4792 s->aflag = aflag;
4793 s->dflag = dflag;
4794
4795 /* lock generation */
4796#ifndef VBOX
4797 if (prefixes & PREFIX_LOCK)
4798 tcg_gen_helper_0_0(helper_lock);
4799#else /* VBOX */
4800 if (prefixes & PREFIX_LOCK) {
4801 if (is_invalid_lock_sequence(s, pc_start, b)) {
4802 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4803 return s->pc;
4804 }
4805 tcg_gen_helper_0_0(helper_lock);
4806 }
4807#endif /* VBOX */
4808
4809 /* now check op code */
4810 reswitch:
4811 switch(b) {
4812 case 0x0f:
4813 /**************************/
4814 /* extended op code */
4815 b = ldub_code(s->pc++) | 0x100;
4816 goto reswitch;
4817
4818 /**************************/
4819 /* arith & logic */
4820 case 0x00 ... 0x05:
4821 case 0x08 ... 0x0d:
4822 case 0x10 ... 0x15:
4823 case 0x18 ... 0x1d:
4824 case 0x20 ... 0x25:
4825 case 0x28 ... 0x2d:
4826 case 0x30 ... 0x35:
4827 case 0x38 ... 0x3d:
4828 {
4829 int op, f, val;
4830 op = (b >> 3) & 7;
4831 f = (b >> 1) & 3;
4832
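            /* opcode layout: bits 5:3 = ALU op (add/or/adc/sbb/and/sub/
               xor/cmp), bits 2:1 = operand form (Ev,Gv / Gv,Ev / A,Iv),
               bit 0 = byte vs full operand size */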
4833 if ((b & 1) == 0)
4834 ot = OT_BYTE;
4835 else
4836 ot = dflag + OT_WORD;
4837
4838 switch(f) {
4839 case 0: /* OP Ev, Gv */
4840 modrm = ldub_code(s->pc++);
4841 reg = ((modrm >> 3) & 7) | rex_r;
4842 mod = (modrm >> 6) & 3;
4843 rm = (modrm & 7) | REX_B(s);
4844 if (mod != 3) {
4845 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4846 opreg = OR_TMP0;
4847 } else if (op == OP_XORL && rm == reg) {
4848 xor_zero:
4849 /* xor reg, reg optimisation */
4850 gen_op_movl_T0_0();
4851 s->cc_op = CC_OP_LOGICB + ot;
4852 gen_op_mov_reg_T0(ot, reg);
4853 gen_op_update1_cc();
4854 break;
4855 } else {
4856 opreg = rm;
4857 }
4858 gen_op_mov_TN_reg(ot, 1, reg);
4859 gen_op(s, op, ot, opreg);
4860 break;
4861 case 1: /* OP Gv, Ev */
4862 modrm = ldub_code(s->pc++);
4863 mod = (modrm >> 6) & 3;
4864 reg = ((modrm >> 3) & 7) | rex_r;
4865 rm = (modrm & 7) | REX_B(s);
4866 if (mod != 3) {
4867 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4868 gen_op_ld_T1_A0(ot + s->mem_index);
4869 } else if (op == OP_XORL && rm == reg) {
4870 goto xor_zero;
4871 } else {
4872 gen_op_mov_TN_reg(ot, 1, rm);
4873 }
4874 gen_op(s, op, ot, reg);
4875 break;
4876 case 2: /* OP A, Iv */
4877 val = insn_get(s, ot);
4878 gen_op_movl_T1_im(val);
4879 gen_op(s, op, ot, OR_EAX);
4880 break;
4881 }
4882 }
4883 break;
4884
4885 case 0x82:
4886 if (CODE64(s))
4887 goto illegal_op;
4888 case 0x80: /* GRP1 */
4889 case 0x81:
4890 case 0x83:
4891 {
4892 int val;
4893
4894 if ((b & 1) == 0)
4895 ot = OT_BYTE;
4896 else
4897 ot = dflag + OT_WORD;
4898
4899 modrm = ldub_code(s->pc++);
4900 mod = (modrm >> 6) & 3;
4901 rm = (modrm & 7) | REX_B(s);
4902 op = (modrm >> 3) & 7;
4903
4904 if (mod != 3) {
4905 if (b == 0x83)
4906 s->rip_offset = 1;
4907 else
4908 s->rip_offset = insn_const_size(ot);
4909 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4910 opreg = OR_TMP0;
4911 } else {
4912 opreg = rm;
4913 }
4914
4915 switch(b) {
4916 default:
4917 case 0x80:
4918 case 0x81:
4919 case 0x82:
4920 val = insn_get(s, ot);
4921 break;
4922 case 0x83:
4923 val = (int8_t)insn_get(s, OT_BYTE);
4924 break;
4925 }
4926 gen_op_movl_T1_im(val);
4927 gen_op(s, op, ot, opreg);
4928 }
4929 break;
4930
4931 /**************************/
4932 /* inc, dec, and other misc arith */
4933 case 0x40 ... 0x47: /* inc Gv */
4934 ot = dflag ? OT_LONG : OT_WORD;
4935 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4936 break;
4937 case 0x48 ... 0x4f: /* dec Gv */
4938 ot = dflag ? OT_LONG : OT_WORD;
4939 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4940 break;
4941 case 0xf6: /* GRP3 */
4942 case 0xf7:
4943 if ((b & 1) == 0)
4944 ot = OT_BYTE;
4945 else
4946 ot = dflag + OT_WORD;
4947
4948 modrm = ldub_code(s->pc++);
4949 mod = (modrm >> 6) & 3;
4950 rm = (modrm & 7) | REX_B(s);
4951 op = (modrm >> 3) & 7;
4952 if (mod != 3) {
4953 if (op == 0)
4954 s->rip_offset = insn_const_size(ot);
4955 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4956 gen_op_ld_T0_A0(ot + s->mem_index);
4957 } else {
4958 gen_op_mov_TN_reg(ot, 0, rm);
4959 }
4960
4961 switch(op) {
4962 case 0: /* test */
4963 val = insn_get(s, ot);
4964 gen_op_movl_T1_im(val);
4965 gen_op_testl_T0_T1_cc();
4966 s->cc_op = CC_OP_LOGICB + ot;
4967 break;
4968 case 2: /* not */
4969 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4970 if (mod != 3) {
4971 gen_op_st_T0_A0(ot + s->mem_index);
4972 } else {
4973 gen_op_mov_reg_T0(ot, rm);
4974 }
4975 break;
4976 case 3: /* neg */
4977 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4978 if (mod != 3) {
4979 gen_op_st_T0_A0(ot + s->mem_index);
4980 } else {
4981 gen_op_mov_reg_T0(ot, rm);
4982 }
4983 gen_op_update_neg_cc();
4984 s->cc_op = CC_OP_SUBB + ot;
4985 break;
4986 case 4: /* mul */
4987 switch(ot) {
4988 case OT_BYTE:
4989 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4990 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4991 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4992 /* XXX: use 32 bit mul which could be faster */
4993 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4994 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4995 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4996 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4997 s->cc_op = CC_OP_MULB;
4998 break;
4999 case OT_WORD:
5000 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5001 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5002 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
5003 /* XXX: use 32 bit mul which could be faster */
5004 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5005 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5006 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5007 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5008 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5009 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5010 s->cc_op = CC_OP_MULW;
5011 break;
5012 default:
5013 case OT_LONG:
5014#ifdef TARGET_X86_64
5015 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5016 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
5017 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
5018 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5019 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5020 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5021 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5022 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5023 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5024#else
5025 {
5026 TCGv t0, t1;
5027 t0 = tcg_temp_new(TCG_TYPE_I64);
5028 t1 = tcg_temp_new(TCG_TYPE_I64);
5029 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5030 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5031 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5032 tcg_gen_mul_i64(t0, t0, t1);
5033 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5034 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5035 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5036 tcg_gen_shri_i64(t0, t0, 32);
5037 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5038 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5039 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5040 }
5041#endif
5042 s->cc_op = CC_OP_MULL;
5043 break;
5044#ifdef TARGET_X86_64
5045 case OT_QUAD:
5046 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5047 s->cc_op = CC_OP_MULQ;
5048 break;
5049#endif
5050 }
5051 break;
5052 case 5: /* imul */
5053 switch(ot) {
5054 case OT_BYTE:
5055 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5056 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5057 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5058 /* XXX: use 32 bit mul which could be faster */
5059 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5060 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5061 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5062 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5063 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5064 s->cc_op = CC_OP_MULB;
5065 break;
5066 case OT_WORD:
5067 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5068 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5069 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5070 /* XXX: use 32 bit mul which could be faster */
5071 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5072 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5073 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5074 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5075 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5076 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5077 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5078 s->cc_op = CC_OP_MULW;
5079 break;
5080 default:
5081 case OT_LONG:
5082#ifdef TARGET_X86_64
5083 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5084 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5085 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5086 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5087 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5088 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5089 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5090 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5091 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5092 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5093#else
5094 {
5095 TCGv t0, t1;
5096 t0 = tcg_temp_new(TCG_TYPE_I64);
5097 t1 = tcg_temp_new(TCG_TYPE_I64);
5098 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5099 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5100 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5101 tcg_gen_mul_i64(t0, t0, t1);
5102 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5103 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5104 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5105 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5106 tcg_gen_shri_i64(t0, t0, 32);
5107 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5108 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5109 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5110 }
5111#endif
5112 s->cc_op = CC_OP_MULL;
5113 break;
5114#ifdef TARGET_X86_64
5115 case OT_QUAD:
5116 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5117 s->cc_op = CC_OP_MULQ;
5118 break;
5119#endif
5120 }
5121 break;
5122 case 6: /* div */
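            /* EIP is synced before each division helper, presumably so a
               #DE raised inside the helper reports the faulting
               instruction (idiv below follows the same pattern) */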
5123 switch(ot) {
5124 case OT_BYTE:
5125 gen_jmp_im(pc_start - s->cs_base);
5126 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5127 break;
5128 case OT_WORD:
5129 gen_jmp_im(pc_start - s->cs_base);
5130 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5131 break;
5132 default:
5133 case OT_LONG:
5134 gen_jmp_im(pc_start - s->cs_base);
5135 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5136 break;
5137#ifdef TARGET_X86_64
5138 case OT_QUAD:
5139 gen_jmp_im(pc_start - s->cs_base);
5140 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5141 break;
5142#endif
5143 }
5144 break;
5145 case 7: /* idiv */
5146 switch(ot) {
5147 case OT_BYTE:
5148 gen_jmp_im(pc_start - s->cs_base);
5149 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5150 break;
5151 case OT_WORD:
5152 gen_jmp_im(pc_start - s->cs_base);
5153 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5154 break;
5155 default:
5156 case OT_LONG:
5157 gen_jmp_im(pc_start - s->cs_base);
5158 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5159 break;
5160#ifdef TARGET_X86_64
5161 case OT_QUAD:
5162 gen_jmp_im(pc_start - s->cs_base);
5163 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5164 break;
5165#endif
5166 }
5167 break;
5168 default:
5169 goto illegal_op;
5170 }
5171 break;
5172
5173 case 0xfe: /* GRP4 */
5174 case 0xff: /* GRP5 */
5175 if ((b & 1) == 0)
5176 ot = OT_BYTE;
5177 else
5178 ot = dflag + OT_WORD;
5179
5180 modrm = ldub_code(s->pc++);
5181 mod = (modrm >> 6) & 3;
5182 rm = (modrm & 7) | REX_B(s);
5183 op = (modrm >> 3) & 7;
5184 if (op >= 2 && b == 0xfe) {
5185 goto illegal_op;
5186 }
5187 if (CODE64(s)) {
5188 if (op == 2 || op == 4) {
5189 /* operand size for jumps is 64 bit */
5190 ot = OT_QUAD;
5191 } else if (op == 3 || op == 5) {
5192 /* for far calls, the operand is 16 or 32 bit, even
5193 in long mode */
5194 ot = dflag ? OT_LONG : OT_WORD;
5195 } else if (op == 6) {
5196 /* default push size is 64 bit */
5197 ot = dflag ? OT_QUAD : OT_WORD;
5198 }
5199 }
5200 if (mod != 3) {
5201 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5202 if (op >= 2 && op != 3 && op != 5)
5203 gen_op_ld_T0_A0(ot + s->mem_index);
5204 } else {
5205 gen_op_mov_TN_reg(ot, 0, rm);
5206 }
5207
5208 switch(op) {
5209 case 0: /* inc Ev */
5210 if (mod != 3)
5211 opreg = OR_TMP0;
5212 else
5213 opreg = rm;
5214 gen_inc(s, ot, opreg, 1);
5215 break;
5216 case 1: /* dec Ev */
5217 if (mod != 3)
5218 opreg = OR_TMP0;
5219 else
5220 opreg = rm;
5221 gen_inc(s, ot, opreg, -1);
5222 break;
5223 case 2: /* call Ev */
5224 /* XXX: optimize if memory (no 'and' is necessary) */
5225#ifdef VBOX_WITH_CALL_RECORD
5226 if (s->record_call)
5227 gen_op_record_call();
5228#endif
5229 if (s->dflag == 0)
5230 gen_op_andl_T0_ffff();
5231 next_eip = s->pc - s->cs_base;
5232 gen_movtl_T1_im(next_eip);
5233 gen_push_T1(s);
5234 gen_op_jmp_T0();
5235 gen_eob(s);
5236 break;
5237 case 3: /* lcall Ev */
5238 gen_op_ld_T1_A0(ot + s->mem_index);
5239 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5240 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5241 do_lcall:
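            /* T0 = new CS selector, T1 = new EIP; protected mode goes
               through a helper so descriptor and privilege checks can be
               performed */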
5242 if (s->pe && !s->vm86) {
5243 if (s->cc_op != CC_OP_DYNAMIC)
5244 gen_op_set_cc_op(s->cc_op);
5245 gen_jmp_im(pc_start - s->cs_base);
5246 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5247 tcg_gen_helper_0_4(helper_lcall_protected,
5248 cpu_tmp2_i32, cpu_T[1],
5249 tcg_const_i32(dflag),
5250 tcg_const_i32(s->pc - pc_start));
5251 } else {
5252 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5253 tcg_gen_helper_0_4(helper_lcall_real,
5254 cpu_tmp2_i32, cpu_T[1],
5255 tcg_const_i32(dflag),
5256 tcg_const_i32(s->pc - s->cs_base));
5257 }
5258 gen_eob(s);
5259 break;
5260 case 4: /* jmp Ev */
5261 if (s->dflag == 0)
5262 gen_op_andl_T0_ffff();
5263 gen_op_jmp_T0();
5264 gen_eob(s);
5265 break;
5266 case 5: /* ljmp Ev */
5267 gen_op_ld_T1_A0(ot + s->mem_index);
5268 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5269 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5270 do_ljmp:
5271 if (s->pe && !s->vm86) {
5272 if (s->cc_op != CC_OP_DYNAMIC)
5273 gen_op_set_cc_op(s->cc_op);
5274 gen_jmp_im(pc_start - s->cs_base);
5275 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5276 tcg_gen_helper_0_3(helper_ljmp_protected,
5277 cpu_tmp2_i32,
5278 cpu_T[1],
5279 tcg_const_i32(s->pc - pc_start));
5280 } else {
5281 gen_op_movl_seg_T0_vm(R_CS);
5282 gen_op_movl_T0_T1();
5283 gen_op_jmp_T0();
5284 }
5285 gen_eob(s);
5286 break;
5287 case 6: /* push Ev */
5288 gen_push_T0(s);
5289 break;
5290 default:
5291 goto illegal_op;
5292 }
5293 break;
5294
5295 case 0x84: /* test Ev, Gv */
5296 case 0x85:
5297 if ((b & 1) == 0)
5298 ot = OT_BYTE;
5299 else
5300 ot = dflag + OT_WORD;
5301
5302 modrm = ldub_code(s->pc++);
5303 mod = (modrm >> 6) & 3;
5304 rm = (modrm & 7) | REX_B(s);
5305 reg = ((modrm >> 3) & 7) | rex_r;
5306
5307 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5308 gen_op_mov_TN_reg(ot, 1, reg);
5309 gen_op_testl_T0_T1_cc();
5310 s->cc_op = CC_OP_LOGICB + ot;
5311 break;
5312
5313 case 0xa8: /* test eAX, Iv */
5314 case 0xa9:
5315 if ((b & 1) == 0)
5316 ot = OT_BYTE;
5317 else
5318 ot = dflag + OT_WORD;
5319 val = insn_get(s, ot);
5320
5321 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5322 gen_op_movl_T1_im(val);
5323 gen_op_testl_T0_T1_cc();
5324 s->cc_op = CC_OP_LOGICB + ot;
5325 break;
5326
5327 case 0x98: /* CWDE/CBW */
5328#ifdef TARGET_X86_64
5329 if (dflag == 2) {
5330 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5331 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5332 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5333 } else
5334#endif
5335 if (dflag == 1) {
5336 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5337 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5338 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5339 } else {
5340 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5341 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5342 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5343 }
5344 break;
5345 case 0x99: /* CDQ/CWD */
5346#ifdef TARGET_X86_64
5347 if (dflag == 2) {
5348 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5349 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5350 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5351 } else
5352#endif
5353 if (dflag == 1) {
5354 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5355 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5356 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5357 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5358 } else {
5359 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5360 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5361 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5362 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5363 }
5364 break;
5365 case 0x1af: /* imul Gv, Ev */
5366 case 0x69: /* imul Gv, Ev, I */
5367 case 0x6b:
5368 ot = dflag + OT_WORD;
5369 modrm = ldub_code(s->pc++);
5370 reg = ((modrm >> 3) & 7) | rex_r;
5371 if (b == 0x69)
5372 s->rip_offset = insn_const_size(ot);
5373 else if (b == 0x6b)
5374 s->rip_offset = 1;
5375 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5376 if (b == 0x69) {
5377 val = insn_get(s, ot);
5378 gen_op_movl_T1_im(val);
5379 } else if (b == 0x6b) {
5380 val = (int8_t)insn_get(s, OT_BYTE);
5381 gen_op_movl_T1_im(val);
5382 } else {
5383 gen_op_mov_TN_reg(ot, 1, reg);
5384 }
5385
5386#ifdef TARGET_X86_64
5387 if (ot == OT_QUAD) {
5388 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5389 } else
5390#endif
5391 if (ot == OT_LONG) {
5392#ifdef TARGET_X86_64
5393 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5394 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5395 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5396 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5397 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5398 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5399#else
5400 {
5401 TCGv t0, t1;
5402 t0 = tcg_temp_new(TCG_TYPE_I64);
5403 t1 = tcg_temp_new(TCG_TYPE_I64);
5404 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5405 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5406 tcg_gen_mul_i64(t0, t0, t1);
5407 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5408 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5409 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5410 tcg_gen_shri_i64(t0, t0, 32);
5411 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5412 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5413 }
5414#endif
5415 } else {
5416 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5417 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5418 /* XXX: use 32 bit mul which could be faster */
5419 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5420 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5421 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5422 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5423 }
5424 gen_op_mov_reg_T0(ot, reg);
5425 s->cc_op = CC_OP_MULB + ot;
5426 break;
5427 case 0x1c0:
5428 case 0x1c1: /* xadd Ev, Gv */
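        /* xadd: the destination receives the sum, the source register
           receives the old destination value */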
5429 if ((b & 1) == 0)
5430 ot = OT_BYTE;
5431 else
5432 ot = dflag + OT_WORD;
5433 modrm = ldub_code(s->pc++);
5434 reg = ((modrm >> 3) & 7) | rex_r;
5435 mod = (modrm >> 6) & 3;
5436 if (mod == 3) {
5437 rm = (modrm & 7) | REX_B(s);
5438 gen_op_mov_TN_reg(ot, 0, reg);
5439 gen_op_mov_TN_reg(ot, 1, rm);
5440 gen_op_addl_T0_T1();
5441 gen_op_mov_reg_T1(ot, reg);
5442 gen_op_mov_reg_T0(ot, rm);
5443 } else {
5444 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5445 gen_op_mov_TN_reg(ot, 0, reg);
5446 gen_op_ld_T1_A0(ot + s->mem_index);
5447 gen_op_addl_T0_T1();
5448 gen_op_st_T0_A0(ot + s->mem_index);
5449 gen_op_mov_reg_T1(ot, reg);
5450 }
5451 gen_op_update2_cc();
5452 s->cc_op = CC_OP_ADDB + ot;
5453 break;
5454 case 0x1b0:
5455 case 0x1b1: /* cmpxchg Ev, Gv */
5456 {
5457 int label1, label2;
5458 TCGv t0, t1, t2, a0;
5459
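            /* t0 = destination, t1 = source register, t2 = EAX - t0 (the
               compare); on equality the destination receives t1, otherwise
               EAX receives t0. The memory form always performs the store
               (writing back the old value on mismatch), matching the
               instruction's unconditional write. */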
5460 if ((b & 1) == 0)
5461 ot = OT_BYTE;
5462 else
5463 ot = dflag + OT_WORD;
5464 modrm = ldub_code(s->pc++);
5465 reg = ((modrm >> 3) & 7) | rex_r;
5466 mod = (modrm >> 6) & 3;
5467 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5468 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5469 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5470 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5471 gen_op_mov_v_reg(ot, t1, reg);
5472 if (mod == 3) {
5473 rm = (modrm & 7) | REX_B(s);
5474 gen_op_mov_v_reg(ot, t0, rm);
5475 } else {
5476 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5477 tcg_gen_mov_tl(a0, cpu_A0);
5478 gen_op_ld_v(ot + s->mem_index, t0, a0);
5479 rm = 0; /* avoid warning */
5480 }
5481 label1 = gen_new_label();
5482 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5483 tcg_gen_sub_tl(t2, t2, t0);
5484 gen_extu(ot, t2);
5485 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5486 if (mod == 3) {
5487 label2 = gen_new_label();
5488 gen_op_mov_reg_v(ot, R_EAX, t0);
5489 tcg_gen_br(label2);
5490 gen_set_label(label1);
5491 gen_op_mov_reg_v(ot, rm, t1);
5492 gen_set_label(label2);
5493 } else {
5494 tcg_gen_mov_tl(t1, t0);
5495 gen_op_mov_reg_v(ot, R_EAX, t0);
5496 gen_set_label(label1);
5497 /* always store */
5498 gen_op_st_v(ot + s->mem_index, t1, a0);
5499 }
5500 tcg_gen_mov_tl(cpu_cc_src, t0);
5501 tcg_gen_mov_tl(cpu_cc_dst, t2);
5502 s->cc_op = CC_OP_SUBB + ot;
5503 tcg_temp_free(t0);
5504 tcg_temp_free(t1);
5505 tcg_temp_free(t2);
5506 tcg_temp_free(a0);
5507 }
5508 break;
5509 case 0x1c7: /* cmpxchg8b */
5510 modrm = ldub_code(s->pc++);
5511 mod = (modrm >> 6) & 3;
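        /* cmpxchg8b/16b requires a memory operand and ModRM reg field /1 */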
5512 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5513 goto illegal_op;
5514#ifdef TARGET_X86_64
5515 if (dflag == 2) {
5516 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5517 goto illegal_op;
5518 gen_jmp_im(pc_start - s->cs_base);
5519 if (s->cc_op != CC_OP_DYNAMIC)
5520 gen_op_set_cc_op(s->cc_op);
5521 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5522 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5523 } else
5524#endif
5525 {
5526 if (!(s->cpuid_features & CPUID_CX8))
5527 goto illegal_op;
5528 gen_jmp_im(pc_start - s->cs_base);
5529 if (s->cc_op != CC_OP_DYNAMIC)
5530 gen_op_set_cc_op(s->cc_op);
5531 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5532 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5533 }
5534 s->cc_op = CC_OP_EFLAGS;
5535 break;
5536
5537 /**************************/
5538 /* push/pop */
5539 case 0x50 ... 0x57: /* push */
5540 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5541 gen_push_T0(s);
5542 break;
5543 case 0x58 ... 0x5f: /* pop */
5544 if (CODE64(s)) {
5545 ot = dflag ? OT_QUAD : OT_WORD;
5546 } else {
5547 ot = dflag + OT_WORD;
5548 }
5549 gen_pop_T0(s);
5550 /* NOTE: order is important for pop %sp */
5551 gen_pop_update(s);
5552 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5553 break;
5554 case 0x60: /* pusha */
5555 if (CODE64(s))
5556 goto illegal_op;
5557 gen_pusha(s);
5558 break;
5559 case 0x61: /* popa */
5560 if (CODE64(s))
5561 goto illegal_op;
5562 gen_popa(s);
5563 break;
5564 case 0x68: /* push Iv */
5565 case 0x6a:
5566 if (CODE64(s)) {
5567 ot = dflag ? OT_QUAD : OT_WORD;
5568 } else {
5569 ot = dflag + OT_WORD;
5570 }
5571 if (b == 0x68)
5572 val = insn_get(s, ot);
5573 else
5574 val = (int8_t)insn_get(s, OT_BYTE);
5575 gen_op_movl_T0_im(val);
5576 gen_push_T0(s);
5577 break;
5578 case 0x8f: /* pop Ev */
5579 if (CODE64(s)) {
5580 ot = dflag ? OT_QUAD : OT_WORD;
5581 } else {
5582 ot = dflag + OT_WORD;
5583 }
5584 modrm = ldub_code(s->pc++);
5585 mod = (modrm >> 6) & 3;
5586 gen_pop_T0(s);
5587 if (mod == 3) {
5588 /* NOTE: order is important for pop %sp */
5589 gen_pop_update(s);
5590 rm = (modrm & 7) | REX_B(s);
5591 gen_op_mov_reg_T0(ot, rm);
5592 } else {
5593 /* NOTE: order is important too for MMU exceptions */
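            /* the destination address must be computed as if ESP had
               already been popped; gen_lea_modrm appears to compensate
               for ESP-based addressing via popl_esp_hack */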
5594 s->popl_esp_hack = 1 << ot;
5595 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5596 s->popl_esp_hack = 0;
5597 gen_pop_update(s);
5598 }
5599 break;
5600 case 0xc8: /* enter */
5601 {
5602 int level;
5603 val = lduw_code(s->pc);
5604 s->pc += 2;
5605 level = ldub_code(s->pc++);
5606 gen_enter(s, val, level);
5607 }
5608 break;
5609 case 0xc9: /* leave */
5610 /* XXX: exception not precise (ESP is updated before potential exception) */
5611 if (CODE64(s)) {
5612 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5613 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5614 } else if (s->ss32) {
5615 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5616 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5617 } else {
5618 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5619 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5620 }
5621 gen_pop_T0(s);
5622 if (CODE64(s)) {
5623 ot = dflag ? OT_QUAD : OT_WORD;
5624 } else {
5625 ot = dflag + OT_WORD;
5626 }
5627 gen_op_mov_reg_T0(ot, R_EBP);
5628 gen_pop_update(s);
5629 break;
5630 case 0x06: /* push es */
5631 case 0x0e: /* push cs */
5632 case 0x16: /* push ss */
5633 case 0x1e: /* push ds */
5634 if (CODE64(s))
5635 goto illegal_op;
5636 gen_op_movl_T0_seg(b >> 3);
5637 gen_push_T0(s);
5638 break;
5639 case 0x1a0: /* push fs */
5640 case 0x1a8: /* push gs */
5641 gen_op_movl_T0_seg((b >> 3) & 7);
5642 gen_push_T0(s);
5643 break;
5644 case 0x07: /* pop es */
5645 case 0x17: /* pop ss */
5646 case 0x1f: /* pop ds */
5647 if (CODE64(s))
5648 goto illegal_op;
5649 reg = b >> 3;
5650 gen_pop_T0(s);
5651 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5652 gen_pop_update(s);
5653 if (reg == R_SS) {
5654 /* if reg == SS, inhibit interrupts/trace. */
5655 /* If several instructions disable interrupts, only the
5656 _first_ does it */
5657 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5658 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5659 s->tf = 0;
5660 }
5661 if (s->is_jmp) {
5662 gen_jmp_im(s->pc - s->cs_base);
5663 gen_eob(s);
5664 }
5665 break;
5666 case 0x1a1: /* pop fs */
5667 case 0x1a9: /* pop gs */
5668 gen_pop_T0(s);
5669 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5670 gen_pop_update(s);
5671 if (s->is_jmp) {
5672 gen_jmp_im(s->pc - s->cs_base);
5673 gen_eob(s);
5674 }
5675 break;
5676
5677 /**************************/
5678 /* mov */
5679 case 0x88:
5680 case 0x89: /* mov Gv, Ev */
5681 if ((b & 1) == 0)
5682 ot = OT_BYTE;
5683 else
5684 ot = dflag + OT_WORD;
5685 modrm = ldub_code(s->pc++);
5686 reg = ((modrm >> 3) & 7) | rex_r;
5687
5688 /* generate a generic store */
5689 gen_ldst_modrm(s, modrm, ot, reg, 1);
5690 break;
5691 case 0xc6:
5692 case 0xc7: /* mov Ev, Iv */
5693 if ((b & 1) == 0)
5694 ot = OT_BYTE;
5695 else
5696 ot = dflag + OT_WORD;
5697 modrm = ldub_code(s->pc++);
5698 mod = (modrm >> 6) & 3;
5699 if (mod != 3) {
5700 s->rip_offset = insn_const_size(ot);
5701 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5702 }
5703 val = insn_get(s, ot);
5704 gen_op_movl_T0_im(val);
5705 if (mod != 3)
5706 gen_op_st_T0_A0(ot + s->mem_index);
5707 else
5708 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5709 break;
5710 case 0x8a:
5711 case 0x8b: /* mov Ev, Gv */
5712#ifdef VBOX /* dtrace hot fix */
5713 if (prefixes & PREFIX_LOCK)
5714 goto illegal_op;
5715#endif
5716 if ((b & 1) == 0)
5717 ot = OT_BYTE;
5718 else
5719 ot = OT_WORD + dflag;
5720 modrm = ldub_code(s->pc++);
5721 reg = ((modrm >> 3) & 7) | rex_r;
5722
5723 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5724 gen_op_mov_reg_T0(ot, reg);
5725 break;
5726 case 0x8e: /* mov seg, Gv */
5727 modrm = ldub_code(s->pc++);
5728 reg = (modrm >> 3) & 7;
5729 if (reg >= 6 || reg == R_CS)
5730 goto illegal_op;
5731 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5732 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5733 if (reg == R_SS) {
5734 /* if reg == SS, inhibit interrupts/trace */
5735 /* If several instructions disable interrupts, only the
5736 _first_ does it */
5737 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5738 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5739 s->tf = 0;
5740 }
5741 if (s->is_jmp) {
5742 gen_jmp_im(s->pc - s->cs_base);
5743 gen_eob(s);
5744 }
5745 break;
5746 case 0x8c: /* mov Gv, seg */
5747 modrm = ldub_code(s->pc++);
5748 reg = (modrm >> 3) & 7;
5749 mod = (modrm >> 6) & 3;
5750 if (reg >= 6)
5751 goto illegal_op;
5752 gen_op_movl_T0_seg(reg);
5753 if (mod == 3)
5754 ot = OT_WORD + dflag;
5755 else
5756 ot = OT_WORD;
5757 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5758 break;
5759
5760 case 0x1b6: /* movzbS Gv, Eb */
5761 case 0x1b7: /* movzwS Gv, Eb */
5762 case 0x1be: /* movsbS Gv, Eb */
5763 case 0x1bf: /* movswS Gv, Eb */
5764 {
5765 int d_ot;
5766 /* d_ot is the size of destination */
5767 d_ot = dflag + OT_WORD;
5768 /* ot is the size of source */
5769 ot = (b & 1) + OT_BYTE;
5770 modrm = ldub_code(s->pc++);
5771 reg = ((modrm >> 3) & 7) | rex_r;
5772 mod = (modrm >> 6) & 3;
5773 rm = (modrm & 7) | REX_B(s);
5774
5775 if (mod == 3) {
5776 gen_op_mov_TN_reg(ot, 0, rm);
5777 switch(ot | (b & 8)) {
5778 case OT_BYTE:
5779 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5780 break;
5781 case OT_BYTE | 8:
5782 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5783 break;
5784 case OT_WORD:
5785 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5786 break;
5787 default:
5788 case OT_WORD | 8:
5789 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5790 break;
5791 }
5792 gen_op_mov_reg_T0(d_ot, reg);
5793 } else {
5794 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5795 if (b & 8) {
5796 gen_op_lds_T0_A0(ot + s->mem_index);
5797 } else {
5798 gen_op_ldu_T0_A0(ot + s->mem_index);
5799 }
5800 gen_op_mov_reg_T0(d_ot, reg);
5801 }
5802 }
5803 break;
5804
5805 case 0x8d: /* lea */
5806 ot = dflag + OT_WORD;
5807 modrm = ldub_code(s->pc++);
5808 mod = (modrm >> 6) & 3;
5809 if (mod == 3)
5810 goto illegal_op;
5811 reg = ((modrm >> 3) & 7) | rex_r;
5812 /* we must ensure that no segment is added */
5813 s->override = -1;
5814 val = s->addseg;
5815 s->addseg = 0;
5816 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5817 s->addseg = val;
5818 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5819 break;
5820
5821 case 0xa0: /* mov EAX, Ov */
5822 case 0xa1:
5823 case 0xa2: /* mov Ov, EAX */
5824 case 0xa3:
5825 {
5826 target_ulong offset_addr;
5827
5828 if ((b & 1) == 0)
5829 ot = OT_BYTE;
5830 else
5831 ot = dflag + OT_WORD;
5832#ifdef TARGET_X86_64
5833 if (s->aflag == 2) {
5834 offset_addr = ldq_code(s->pc);
5835 s->pc += 8;
5836 gen_op_movq_A0_im(offset_addr);
5837 } else
5838#endif
5839 {
5840 if (s->aflag) {
5841 offset_addr = insn_get(s, OT_LONG);
5842 } else {
5843 offset_addr = insn_get(s, OT_WORD);
5844 }
5845 gen_op_movl_A0_im(offset_addr);
5846 }
5847 gen_add_A0_ds_seg(s);
5848 if ((b & 2) == 0) {
5849 gen_op_ld_T0_A0(ot + s->mem_index);
5850 gen_op_mov_reg_T0(ot, R_EAX);
5851 } else {
5852 gen_op_mov_TN_reg(ot, 0, R_EAX);
5853 gen_op_st_T0_A0(ot + s->mem_index);
5854 }
5855 }
5856 break;
5857 case 0xd7: /* xlat */
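        /* xlat: AL = [seg:(rBX + zero-extended AL)] */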
5858#ifdef TARGET_X86_64
5859 if (s->aflag == 2) {
5860 gen_op_movq_A0_reg(R_EBX);
5861 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5862 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5863 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5864 } else
5865#endif
5866 {
5867 gen_op_movl_A0_reg(R_EBX);
5868 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5869 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5870 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5871 if (s->aflag == 0)
5872 gen_op_andl_A0_ffff();
5873 else
5874 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5875 }
5876 gen_add_A0_ds_seg(s);
5877 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5878 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5879 break;
5880 case 0xb0 ... 0xb7: /* mov R, Ib */
5881 val = insn_get(s, OT_BYTE);
5882 gen_op_movl_T0_im(val);
5883 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5884 break;
5885 case 0xb8 ... 0xbf: /* mov R, Iv */
5886#ifdef TARGET_X86_64
5887 if (dflag == 2) {
5888 uint64_t tmp;
5889 /* 64 bit case */
5890 tmp = ldq_code(s->pc);
5891 s->pc += 8;
5892 reg = (b & 7) | REX_B(s);
5893 gen_movtl_T0_im(tmp);
5894 gen_op_mov_reg_T0(OT_QUAD, reg);
5895 } else
5896#endif
5897 {
5898 ot = dflag ? OT_LONG : OT_WORD;
5899 val = insn_get(s, ot);
5900 reg = (b & 7) | REX_B(s);
5901 gen_op_movl_T0_im(val);
5902 gen_op_mov_reg_T0(ot, reg);
5903 }
5904 break;
5905
5906 case 0x91 ... 0x97: /* xchg R, EAX */
5907 ot = dflag + OT_WORD;
5908 reg = (b & 7) | REX_B(s);
5909 rm = R_EAX;
5910 goto do_xchg_reg;
5911 case 0x86:
5912 case 0x87: /* xchg Ev, Gv */
5913 if ((b & 1) == 0)
5914 ot = OT_BYTE;
5915 else
5916 ot = dflag + OT_WORD;
5917 modrm = ldub_code(s->pc++);
5918 reg = ((modrm >> 3) & 7) | rex_r;
5919 mod = (modrm >> 6) & 3;
5920 if (mod == 3) {
5921 rm = (modrm & 7) | REX_B(s);
5922 do_xchg_reg:
5923 gen_op_mov_TN_reg(ot, 0, reg);
5924 gen_op_mov_TN_reg(ot, 1, rm);
5925 gen_op_mov_reg_T0(ot, rm);
5926 gen_op_mov_reg_T1(ot, reg);
5927 } else {
5928 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5929 gen_op_mov_TN_reg(ot, 0, reg);
5930 /* for xchg, lock is implicit */
5931 if (!(prefixes & PREFIX_LOCK))
5932 tcg_gen_helper_0_0(helper_lock);
5933 gen_op_ld_T1_A0(ot + s->mem_index);
5934 gen_op_st_T0_A0(ot + s->mem_index);
5935 if (!(prefixes & PREFIX_LOCK))
5936 tcg_gen_helper_0_0(helper_unlock);
5937 gen_op_mov_reg_T1(ot, reg);
5938 }
5939 break;
5940 case 0xc4: /* les Gv */
5941 if (CODE64(s))
5942 goto illegal_op;
5943 op = R_ES;
5944 goto do_lxx;
5945 case 0xc5: /* lds Gv */
5946 if (CODE64(s))
5947 goto illegal_op;
5948 op = R_DS;
5949 goto do_lxx;
5950 case 0x1b2: /* lss Gv */
5951 op = R_SS;
5952 goto do_lxx;
5953 case 0x1b4: /* lfs Gv */
5954 op = R_FS;
5955 goto do_lxx;
5956 case 0x1b5: /* lgs Gv */
5957 op = R_GS;
5958 do_lxx:
5959 ot = dflag ? OT_LONG : OT_WORD;
5960 modrm = ldub_code(s->pc++);
5961 reg = ((modrm >> 3) & 7) | rex_r;
5962 mod = (modrm >> 6) & 3;
5963 if (mod == 3)
5964 goto illegal_op;
5965 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5966 gen_op_ld_T1_A0(ot + s->mem_index);
5967 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5968 /* load the segment first to handle exceptions properly */
5969 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5970 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5971 /* then put the data */
5972 gen_op_mov_reg_T1(ot, reg);
5973 if (s->is_jmp) {
5974 gen_jmp_im(s->pc - s->cs_base);
5975 gen_eob(s);
5976 }
5977 break;
5978
5979 /************************/
5980 /* shifts */
5981 case 0xc0:
5982 case 0xc1:
5983 /* shift Ev,Ib */
5984 shift = 2;
5985 grp2:
5986 {
5987 if ((b & 1) == 0)
5988 ot = OT_BYTE;
5989 else
5990 ot = dflag + OT_WORD;
5991
5992 modrm = ldub_code(s->pc++);
5993 mod = (modrm >> 6) & 3;
5994 op = (modrm >> 3) & 7;
5995
5996 if (mod != 3) {
5997 if (shift == 2) {
5998 s->rip_offset = 1;
5999 }
6000 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6001 opreg = OR_TMP0;
6002 } else {
6003 opreg = (modrm & 7) | REX_B(s);
6004 }
6005
6006 /* shift==0: count in CL; shift==1: count is 1; shift==2: immediate count byte */
6007 if (shift == 0) {
6008 gen_shift(s, op, ot, opreg, OR_ECX);
6009 } else {
6010 if (shift == 2) {
6011 shift = ldub_code(s->pc++);
6012 }
6013 gen_shifti(s, op, ot, opreg, shift);
6014 }
6015 }
6016 break;
6017 case 0xd0:
6018 case 0xd1:
6019 /* shift Ev,1 */
6020 shift = 1;
6021 goto grp2;
6022 case 0xd2:
6023 case 0xd3:
6024 /* shift Ev,cl */
6025 shift = 0;
6026 goto grp2;
6027
6028 case 0x1a4: /* shld imm */
6029 op = 0;
6030 shift = 1;
6031 goto do_shiftd;
6032 case 0x1a5: /* shld cl */
6033 op = 0;
6034 shift = 0;
6035 goto do_shiftd;
6036 case 0x1ac: /* shrd imm */
6037 op = 1;
6038 shift = 1;
6039 goto do_shiftd;
6040 case 0x1ad: /* shrd cl */
6041 op = 1;
6042 shift = 0;
6043 do_shiftd:
6044 ot = dflag + OT_WORD;
6045 modrm = ldub_code(s->pc++);
6046 mod = (modrm >> 6) & 3;
6047 rm = (modrm & 7) | REX_B(s);
6048 reg = ((modrm >> 3) & 7) | rex_r;
6049 if (mod != 3) {
6050 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6051 opreg = OR_TMP0;
6052 } else {
6053 opreg = rm;
6054 }
6055 gen_op_mov_TN_reg(ot, 1, reg);
6056
6057 if (shift) {
6058 val = ldub_code(s->pc++);
6059 tcg_gen_movi_tl(cpu_T3, val);
6060 } else {
6061 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6062 }
6063 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6064 break;
6065
6066 /************************/
6067 /* floats */
6068 case 0xd8 ... 0xdf:
6069 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6070 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6071 /* XXX: what to do if illegal op ? */
6072 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6073 break;
6074 }
6075 modrm = ldub_code(s->pc++);
6076 mod = (modrm >> 6) & 3;
6077 rm = modrm & 7;
6078 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
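        /* pack the low 3 opcode bits (0xd8..0xdf) and the ModRM reg field
           into a 6-bit FPU operation index */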
6079 if (mod != 3) {
6080 /* memory op */
6081 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6082 switch(op) {
6083 case 0x00 ... 0x07: /* fxxxs */
6084 case 0x10 ... 0x17: /* fixxxl */
6085 case 0x20 ... 0x27: /* fxxxl */
6086 case 0x30 ... 0x37: /* fixxx */
6087 {
6088 int op1;
6089 op1 = op & 7;
6090
6091 switch(op >> 4) {
6092 case 0:
6093 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6094 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6095 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6096 break;
6097 case 1:
6098 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6099 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6100 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6101 break;
6102 case 2:
6103 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6104 (s->mem_index >> 2) - 1);
6105 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6106 break;
6107 case 3:
6108 default:
6109 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6110 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6111 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6112 break;
6113 }
6114
6115 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6116 if (op1 == 3) {
6117 /* fcomp needs pop */
6118 tcg_gen_helper_0_0(helper_fpop);
6119 }
6120 }
6121 break;
6122 case 0x08: /* flds */
6123 case 0x0a: /* fsts */
6124 case 0x0b: /* fstps */
6125 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6126 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6127 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6128 switch(op & 7) {
6129 case 0:
6130 switch(op >> 4) {
6131 case 0:
6132 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6133 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6134 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6135 break;
6136 case 1:
6137 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6138 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6139 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6140 break;
6141 case 2:
6142 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6143 (s->mem_index >> 2) - 1);
6144 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6145 break;
6146 case 3:
6147 default:
6148 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6149 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6150 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6151 break;
6152 }
6153 break;
6154 case 1:
6155 /* XXX: fisttp needs the SSE3 CPUID bit, which is not tested here */
6156 switch(op >> 4) {
6157 case 1:
6158 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6159 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6160 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6161 break;
6162 case 2:
6163 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6164 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6165 (s->mem_index >> 2) - 1);
6166 break;
6167 case 3:
6168 default:
6169 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6170 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6171 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6172 break;
6173 }
6174 tcg_gen_helper_0_0(helper_fpop);
6175 break;
6176 default:
6177 switch(op >> 4) {
6178 case 0:
6179 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6180 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6181 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6182 break;
6183 case 1:
6184 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6185 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6186 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6187 break;
6188 case 2:
6189 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6190 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6191 (s->mem_index >> 2) - 1);
6192 break;
6193 case 3:
6194 default:
6195 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6196 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6197 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6198 break;
6199 }
6200 if ((op & 7) == 3)
6201 tcg_gen_helper_0_0(helper_fpop);
6202 break;
6203 }
6204 break;
6205 case 0x0c: /* fldenv mem */
6206 if (s->cc_op != CC_OP_DYNAMIC)
6207 gen_op_set_cc_op(s->cc_op);
6208 gen_jmp_im(pc_start - s->cs_base);
6209 tcg_gen_helper_0_2(helper_fldenv,
6210 cpu_A0, tcg_const_i32(s->dflag));
6211 break;
6212 case 0x0d: /* fldcw mem */
6213 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6214 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6215 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6216 break;
6217 case 0x0e: /* fnstenv mem */
6218 if (s->cc_op != CC_OP_DYNAMIC)
6219 gen_op_set_cc_op(s->cc_op);
6220 gen_jmp_im(pc_start - s->cs_base);
6221 tcg_gen_helper_0_2(helper_fstenv,
6222 cpu_A0, tcg_const_i32(s->dflag));
6223 break;
6224 case 0x0f: /* fnstcw mem */
6225 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6226 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6227 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6228 break;
6229 case 0x1d: /* fldt mem */
6230 if (s->cc_op != CC_OP_DYNAMIC)
6231 gen_op_set_cc_op(s->cc_op);
6232 gen_jmp_im(pc_start - s->cs_base);
6233 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6234 break;
6235 case 0x1f: /* fstpt mem */
6236 if (s->cc_op != CC_OP_DYNAMIC)
6237 gen_op_set_cc_op(s->cc_op);
6238 gen_jmp_im(pc_start - s->cs_base);
6239 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6240 tcg_gen_helper_0_0(helper_fpop);
6241 break;
6242 case 0x2c: /* frstor mem */
6243 if (s->cc_op != CC_OP_DYNAMIC)
6244 gen_op_set_cc_op(s->cc_op);
6245 gen_jmp_im(pc_start - s->cs_base);
6246 tcg_gen_helper_0_2(helper_frstor,
6247 cpu_A0, tcg_const_i32(s->dflag));
6248 break;
6249 case 0x2e: /* fnsave mem */
6250 if (s->cc_op != CC_OP_DYNAMIC)
6251 gen_op_set_cc_op(s->cc_op);
6252 gen_jmp_im(pc_start - s->cs_base);
6253 tcg_gen_helper_0_2(helper_fsave,
6254 cpu_A0, tcg_const_i32(s->dflag));
6255 break;
6256 case 0x2f: /* fnstsw mem */
6257 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6258 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6259 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6260 break;
6261 case 0x3c: /* fbld */
6262 if (s->cc_op != CC_OP_DYNAMIC)
6263 gen_op_set_cc_op(s->cc_op);
6264 gen_jmp_im(pc_start - s->cs_base);
6265 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6266 break;
6267 case 0x3e: /* fbstp */
6268 if (s->cc_op != CC_OP_DYNAMIC)
6269 gen_op_set_cc_op(s->cc_op);
6270 gen_jmp_im(pc_start - s->cs_base);
6271 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6272 tcg_gen_helper_0_0(helper_fpop);
6273 break;
6274 case 0x3d: /* fildll */
6275 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6276 (s->mem_index >> 2) - 1);
6277 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6278 break;
6279 case 0x3f: /* fistpll */
6280 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6281 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6282 (s->mem_index >> 2) - 1);
6283 tcg_gen_helper_0_0(helper_fpop);
6284 break;
6285 default:
6286 goto illegal_op;
6287 }
6288 } else {
6289 /* register float ops */
6290 opreg = rm;
6291
6292 switch(op) {
6293 case 0x08: /* fld sti */
6294 tcg_gen_helper_0_0(helper_fpush);
6295 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6296 break;
6297 case 0x09: /* fxchg sti */
6298 case 0x29: /* fxchg4 sti, undocumented op */
6299 case 0x39: /* fxchg7 sti, undocumented op */
6300 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6301 break;
6302 case 0x0a: /* grp d9/2 */
6303 switch(rm) {
6304 case 0: /* fnop */
6305 /* check exceptions (FreeBSD FPU probe) */
6306 if (s->cc_op != CC_OP_DYNAMIC)
6307 gen_op_set_cc_op(s->cc_op);
6308 gen_jmp_im(pc_start - s->cs_base);
6309 tcg_gen_helper_0_0(helper_fwait);
6310 break;
6311 default:
6312 goto illegal_op;
6313 }
6314 break;
6315 case 0x0c: /* grp d9/4 */
6316 switch(rm) {
6317 case 0: /* fchs */
6318 tcg_gen_helper_0_0(helper_fchs_ST0);
6319 break;
6320 case 1: /* fabs */
6321 tcg_gen_helper_0_0(helper_fabs_ST0);
6322 break;
6323 case 4: /* ftst */
6324 tcg_gen_helper_0_0(helper_fldz_FT0);
6325 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6326 break;
6327 case 5: /* fxam */
6328 tcg_gen_helper_0_0(helper_fxam_ST0);
6329 break;
6330 default:
6331 goto illegal_op;
6332 }
6333 break;
6334 case 0x0d: /* grp d9/5 */
6335 {
6336 switch(rm) {
6337 case 0:
6338 tcg_gen_helper_0_0(helper_fpush);
6339 tcg_gen_helper_0_0(helper_fld1_ST0);
6340 break;
6341 case 1:
6342 tcg_gen_helper_0_0(helper_fpush);
6343 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6344 break;
6345 case 2:
6346 tcg_gen_helper_0_0(helper_fpush);
6347 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6348 break;
6349 case 3:
6350 tcg_gen_helper_0_0(helper_fpush);
6351 tcg_gen_helper_0_0(helper_fldpi_ST0);
6352 break;
6353 case 4:
6354 tcg_gen_helper_0_0(helper_fpush);
6355 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6356 break;
6357 case 5:
6358 tcg_gen_helper_0_0(helper_fpush);
6359 tcg_gen_helper_0_0(helper_fldln2_ST0);
6360 break;
6361 case 6:
6362 tcg_gen_helper_0_0(helper_fpush);
6363 tcg_gen_helper_0_0(helper_fldz_ST0);
6364 break;
6365 default:
6366 goto illegal_op;
6367 }
6368 }
6369 break;
6370 case 0x0e: /* grp d9/6 */
6371 switch(rm) {
6372 case 0: /* f2xm1 */
6373 tcg_gen_helper_0_0(helper_f2xm1);
6374 break;
6375 case 1: /* fyl2x */
6376 tcg_gen_helper_0_0(helper_fyl2x);
6377 break;
6378 case 2: /* fptan */
6379 tcg_gen_helper_0_0(helper_fptan);
6380 break;
6381 case 3: /* fpatan */
6382 tcg_gen_helper_0_0(helper_fpatan);
6383 break;
6384 case 4: /* fxtract */
6385 tcg_gen_helper_0_0(helper_fxtract);
6386 break;
6387 case 5: /* fprem1 */
6388 tcg_gen_helper_0_0(helper_fprem1);
6389 break;
6390 case 6: /* fdecstp */
6391 tcg_gen_helper_0_0(helper_fdecstp);
6392 break;
6393 default:
6394 case 7: /* fincstp */
6395 tcg_gen_helper_0_0(helper_fincstp);
6396 break;
6397 }
6398 break;
6399 case 0x0f: /* grp d9/7 */
6400 switch(rm) {
6401 case 0: /* fprem */
6402 tcg_gen_helper_0_0(helper_fprem);
6403 break;
6404 case 1: /* fyl2xp1 */
6405 tcg_gen_helper_0_0(helper_fyl2xp1);
6406 break;
6407 case 2: /* fsqrt */
6408 tcg_gen_helper_0_0(helper_fsqrt);
6409 break;
6410 case 3: /* fsincos */
6411 tcg_gen_helper_0_0(helper_fsincos);
6412 break;
6413 case 5: /* fscale */
6414 tcg_gen_helper_0_0(helper_fscale);
6415 break;
6416 case 4: /* frndint */
6417 tcg_gen_helper_0_0(helper_frndint);
6418 break;
6419 case 6: /* fsin */
6420 tcg_gen_helper_0_0(helper_fsin);
6421 break;
6422 default:
6423 case 7: /* fcos */
6424 tcg_gen_helper_0_0(helper_fcos);
6425 break;
6426 }
6427 break;
6428 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6429 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6430 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6431 {
6432 int op1;
6433
6434 op1 = op & 7;
6435 if (op >= 0x20) {
6436 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6437 if (op >= 0x30)
6438 tcg_gen_helper_0_0(helper_fpop);
6439 } else {
6440 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6441 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6442 }
6443 }
6444 break;
6445 case 0x02: /* fcom */
6446 case 0x22: /* fcom2, undocumented op */
6447 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6448 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6449 break;
6450 case 0x03: /* fcomp */
6451 case 0x23: /* fcomp3, undocumented op */
6452 case 0x32: /* fcomp5, undocumented op */
6453 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6454 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6455 tcg_gen_helper_0_0(helper_fpop);
6456 break;
6457 case 0x15: /* da/5 */
6458 switch(rm) {
6459 case 1: /* fucompp */
6460 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6461 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6462 tcg_gen_helper_0_0(helper_fpop);
6463 tcg_gen_helper_0_0(helper_fpop);
6464 break;
6465 default:
6466 goto illegal_op;
6467 }
6468 break;
6469 case 0x1c:
6470 switch(rm) {
6471 case 0: /* feni (287 only, just do nop here) */
6472 break;
6473 case 1: /* fdisi (287 only, just do nop here) */
6474 break;
6475 case 2: /* fclex */
6476 tcg_gen_helper_0_0(helper_fclex);
6477 break;
6478 case 3: /* fninit */
6479 tcg_gen_helper_0_0(helper_fninit);
6480 break;
6481 case 4: /* fsetpm (287 only, just do nop here) */
6482 break;
6483 default:
6484 goto illegal_op;
6485 }
6486 break;
6487 case 0x1d: /* fucomi */
6488 if (s->cc_op != CC_OP_DYNAMIC)
6489 gen_op_set_cc_op(s->cc_op);
6490 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6491 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6492 s->cc_op = CC_OP_EFLAGS;
6493 break;
6494 case 0x1e: /* fcomi */
6495 if (s->cc_op != CC_OP_DYNAMIC)
6496 gen_op_set_cc_op(s->cc_op);
6497 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6498 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6499 s->cc_op = CC_OP_EFLAGS;
6500 break;
6501 case 0x28: /* ffree sti */
6502 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6503 break;
6504 case 0x2a: /* fst sti */
6505 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6506 break;
6507 case 0x2b: /* fstp sti */
6508 case 0x0b: /* fstp1 sti, undocumented op */
6509 case 0x3a: /* fstp8 sti, undocumented op */
6510 case 0x3b: /* fstp9 sti, undocumented op */
6511 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6512 tcg_gen_helper_0_0(helper_fpop);
6513 break;
6514 case 0x2c: /* fucom st(i) */
6515 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6516 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6517 break;
6518 case 0x2d: /* fucomp st(i) */
6519 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6520 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6521 tcg_gen_helper_0_0(helper_fpop);
6522 break;
6523 case 0x33: /* de/3 */
6524 switch(rm) {
6525 case 1: /* fcompp */
6526 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6527 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6528 tcg_gen_helper_0_0(helper_fpop);
6529 tcg_gen_helper_0_0(helper_fpop);
6530 break;
6531 default:
6532 goto illegal_op;
6533 }
6534 break;
6535 case 0x38: /* ffreep sti, undocumented op */
6536 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6537 tcg_gen_helper_0_0(helper_fpop);
6538 break;
6539 case 0x3c: /* df/4 */
6540 switch(rm) {
6541 case 0:
6542 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6543 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6544 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6545 break;
6546 default:
6547 goto illegal_op;
6548 }
6549 break;
6550 case 0x3d: /* fucomip */
6551 if (s->cc_op != CC_OP_DYNAMIC)
6552 gen_op_set_cc_op(s->cc_op);
6553 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6554 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6555 tcg_gen_helper_0_0(helper_fpop);
6556 s->cc_op = CC_OP_EFLAGS;
6557 break;
6558 case 0x3e: /* fcomip */
6559 if (s->cc_op != CC_OP_DYNAMIC)
6560 gen_op_set_cc_op(s->cc_op);
6561 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6562 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6563 tcg_gen_helper_0_0(helper_fpop);
6564 s->cc_op = CC_OP_EFLAGS;
6565 break;
6566 case 0x10 ... 0x13: /* fcmovxx */
6567 case 0x18 ... 0x1b:
6568 {
6569 int op1, l1;
6570 static const uint8_t fcmov_cc[8] = {
6571 (JCC_B << 1),
6572 (JCC_Z << 1),
6573 (JCC_BE << 1),
6574 (JCC_P << 1),
6575 };
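/* opcode bits 0-1 pick the base condition (b/e/be/u); the 0x18
   group is the negated form. The low bit of op1 inverts the test
   so the branch below skips the fmov when the condition fails. */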
6576 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6577 l1 = gen_new_label();
6578 gen_jcc1(s, s->cc_op, op1, l1);
6579 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6580 gen_set_label(l1);
6581 }
6582 break;
6583 default:
6584 goto illegal_op;
6585 }
6586 }
6587 break;
6588 /************************/
6589 /* string ops */
6590
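/* movs/stos/lods/scas/cmps: with a rep prefix the gen_repz_*
   helpers emit the count/termination loop; scas and cmps also
   distinguish repz from repnz and, when unprefixed, set
   CC_OP_SUBB + ot for the comparison flags. */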
6591 case 0xa4: /* movsS */
6592 case 0xa5:
6593 if ((b & 1) == 0)
6594 ot = OT_BYTE;
6595 else
6596 ot = dflag + OT_WORD;
6597
6598 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6599 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6600 } else {
6601 gen_movs(s, ot);
6602 }
6603 break;
6604
6605 case 0xaa: /* stosS */
6606 case 0xab:
6607 if ((b & 1) == 0)
6608 ot = OT_BYTE;
6609 else
6610 ot = dflag + OT_WORD;
6611
6612 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6613 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6614 } else {
6615 gen_stos(s, ot);
6616 }
6617 break;
6618 case 0xac: /* lodsS */
6619 case 0xad:
6620 if ((b & 1) == 0)
6621 ot = OT_BYTE;
6622 else
6623 ot = dflag + OT_WORD;
6624 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6625 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6626 } else {
6627 gen_lods(s, ot);
6628 }
6629 break;
6630 case 0xae: /* scasS */
6631 case 0xaf:
6632 if ((b & 1) == 0)
6633 ot = OT_BYTE;
6634 else
6635 ot = dflag + OT_WORD;
6636 if (prefixes & PREFIX_REPNZ) {
6637 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6638 } else if (prefixes & PREFIX_REPZ) {
6639 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6640 } else {
6641 gen_scas(s, ot);
6642 s->cc_op = CC_OP_SUBB + ot;
6643 }
6644 break;
6645
6646 case 0xa6: /* cmpsS */
6647 case 0xa7:
6648 if ((b & 1) == 0)
6649 ot = OT_BYTE;
6650 else
6651 ot = dflag + OT_WORD;
6652 if (prefixes & PREFIX_REPNZ) {
6653 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6654 } else if (prefixes & PREFIX_REPZ) {
6655 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6656 } else {
6657 gen_cmps(s, ot);
6658 s->cc_op = CC_OP_SUBB + ot;
6659 }
6660 break;
6661 case 0x6c: /* insS */
6662 case 0x6d:
6663 if ((b & 1) == 0)
6664 ot = OT_BYTE;
6665 else
6666 ot = dflag ? OT_LONG : OT_WORD;
6667 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6668 gen_op_andl_T0_ffff();
6669 gen_check_io(s, ot, pc_start - s->cs_base,
6670 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6671 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6672 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6673 } else {
6674 gen_ins(s, ot);
6675 if (use_icount) {
6676 gen_jmp(s, s->pc - s->cs_base);
6677 }
6678 }
6679 break;
6680 case 0x6e: /* outsS */
6681 case 0x6f:
6682 if ((b & 1) == 0)
6683 ot = OT_BYTE;
6684 else
6685 ot = dflag ? OT_LONG : OT_WORD;
6686 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6687 gen_op_andl_T0_ffff();
6688 gen_check_io(s, ot, pc_start - s->cs_base,
6689 svm_is_rep(prefixes) | 4);
6690 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6691 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6692 } else {
6693 gen_outs(s, ot);
6694 if (use_icount) {
6695 gen_jmp(s, s->pc - s->cs_base);
6696 }
6697 }
6698 break;
6699
6700 /************************/
6701 /* port I/O */
6702
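/* in/out: e4-e7 take an imm8 port number, ec-ef take the port
   from DX. gen_check_io validates the I/O permission (IOPL or the
   TSS bitmap) and the SVM I/O intercept before the access. */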
6703 case 0xe4:
6704 case 0xe5:
6705 if ((b & 1) == 0)
6706 ot = OT_BYTE;
6707 else
6708 ot = dflag ? OT_LONG : OT_WORD;
6709 val = ldub_code(s->pc++);
6710 gen_op_movl_T0_im(val);
6711 gen_check_io(s, ot, pc_start - s->cs_base,
6712 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6713 if (use_icount)
6714 gen_io_start();
6715 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6716 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6717 gen_op_mov_reg_T1(ot, R_EAX);
6718 if (use_icount) {
6719 gen_io_end();
6720 gen_jmp(s, s->pc - s->cs_base);
6721 }
6722 break;
6723 case 0xe6:
6724 case 0xe7:
6725 if ((b & 1) == 0)
6726 ot = OT_BYTE;
6727 else
6728 ot = dflag ? OT_LONG : OT_WORD;
6729 val = ldub_code(s->pc++);
6730 gen_op_movl_T0_im(val);
6731 gen_check_io(s, ot, pc_start - s->cs_base,
6732 svm_is_rep(prefixes));
6733#ifdef VBOX /* bird: Linux writes to this port to delay I/O. */
6734 if (val == 0x80)
6735 break;
6736#endif /* VBOX */
6737 gen_op_mov_TN_reg(ot, 1, R_EAX);
6738
6739 if (use_icount)
6740 gen_io_start();
6741 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6742 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6743 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6744 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6745 if (use_icount) {
6746 gen_io_end();
6747 gen_jmp(s, s->pc - s->cs_base);
6748 }
6749 break;
6750 case 0xec:
6751 case 0xed:
6752 if ((b & 1) == 0)
6753 ot = OT_BYTE;
6754 else
6755 ot = dflag ? OT_LONG : OT_WORD;
6756 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6757 gen_op_andl_T0_ffff();
6758 gen_check_io(s, ot, pc_start - s->cs_base,
6759 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6760 if (use_icount)
6761 gen_io_start();
6762 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6763 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6764 gen_op_mov_reg_T1(ot, R_EAX);
6765 if (use_icount) {
6766 gen_io_end();
6767 gen_jmp(s, s->pc - s->cs_base);
6768 }
6769 break;
6770 case 0xee:
6771 case 0xef:
6772 if ((b & 1) == 0)
6773 ot = OT_BYTE;
6774 else
6775 ot = dflag ? OT_LONG : OT_WORD;
6776 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6777 gen_op_andl_T0_ffff();
6778 gen_check_io(s, ot, pc_start - s->cs_base,
6779 svm_is_rep(prefixes));
6780 gen_op_mov_TN_reg(ot, 1, R_EAX);
6781
6782 if (use_icount)
6783 gen_io_start();
6784 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6785 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6786 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6787 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6788 if (use_icount) {
6789 gen_io_end();
6790 gen_jmp(s, s->pc - s->cs_base);
6791 }
6792 break;
6793
6794 /************************/
6795 /* control */
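/* near returns pop EIP directly; far returns and iret go through
   the protected-mode helpers when segmentation is active so that
   selector and privilege checks raise the proper exceptions. */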
6796 case 0xc2: /* ret im */
6797 val = ldsw_code(s->pc);
6798 s->pc += 2;
6799 gen_pop_T0(s);
6800 if (CODE64(s) && s->dflag)
6801 s->dflag = 2;
6802 gen_stack_update(s, val + (2 << s->dflag));
6803 if (s->dflag == 0)
6804 gen_op_andl_T0_ffff();
6805 gen_op_jmp_T0();
6806 gen_eob(s);
6807 break;
6808 case 0xc3: /* ret */
6809 gen_pop_T0(s);
6810 gen_pop_update(s);
6811 if (s->dflag == 0)
6812 gen_op_andl_T0_ffff();
6813 gen_op_jmp_T0();
6814 gen_eob(s);
6815 break;
6816 case 0xca: /* lret im */
6817 val = ldsw_code(s->pc);
6818 s->pc += 2;
6819 do_lret:
6820 if (s->pe && !s->vm86) {
6821 if (s->cc_op != CC_OP_DYNAMIC)
6822 gen_op_set_cc_op(s->cc_op);
6823 gen_jmp_im(pc_start - s->cs_base);
6824 tcg_gen_helper_0_2(helper_lret_protected,
6825 tcg_const_i32(s->dflag),
6826 tcg_const_i32(val));
6827 } else {
6828 gen_stack_A0(s);
6829 /* pop offset */
6830 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6831 if (s->dflag == 0)
6832 gen_op_andl_T0_ffff();
6833 /* NOTE: keeping EIP updated is not a problem in case of
6834 exception */
6835 gen_op_jmp_T0();
6836 /* pop selector */
6837 gen_op_addl_A0_im(2 << s->dflag);
6838 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6839 gen_op_movl_seg_T0_vm(R_CS);
6840 /* add stack offset */
6841 gen_stack_update(s, val + (4 << s->dflag));
6842 }
6843 gen_eob(s);
6844 break;
6845 case 0xcb: /* lret */
6846 val = 0;
6847 goto do_lret;
6848 case 0xcf: /* iret */
6849 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6850 if (!s->pe) {
6851 /* real mode */
6852 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6853 s->cc_op = CC_OP_EFLAGS;
6854 } else if (s->vm86) {
6855#ifdef VBOX
6856 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6857#else
6858 if (s->iopl != 3) {
6859#endif
6860 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6861 } else {
6862 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6863 s->cc_op = CC_OP_EFLAGS;
6864 }
6865 } else {
6866 if (s->cc_op != CC_OP_DYNAMIC)
6867 gen_op_set_cc_op(s->cc_op);
6868 gen_jmp_im(pc_start - s->cs_base);
6869 tcg_gen_helper_0_2(helper_iret_protected,
6870 tcg_const_i32(s->dflag),
6871 tcg_const_i32(s->pc - s->cs_base));
6872 s->cc_op = CC_OP_EFLAGS;
6873 }
6874 gen_eob(s);
6875 break;
6876 case 0xe8: /* call im */
6877 {
6878 if (dflag)
6879 tval = (int32_t)insn_get(s, OT_LONG);
6880 else
6881 tval = (int16_t)insn_get(s, OT_WORD);
6882 next_eip = s->pc - s->cs_base;
6883 tval += next_eip;
6884 if (s->dflag == 0)
6885 tval &= 0xffff;
6886 else if (!CODE64(s))
6887 tval &= 0xffffffff;
6888 gen_movtl_T0_im(next_eip);
6889 gen_push_T0(s);
6890 gen_jmp(s, tval);
6891 }
6892 break;
6893 case 0x9a: /* lcall im */
6894 {
6895 unsigned int selector, offset;
6896
6897 if (CODE64(s))
6898 goto illegal_op;
6899 ot = dflag ? OT_LONG : OT_WORD;
6900 offset = insn_get(s, ot);
6901 selector = insn_get(s, OT_WORD);
6902
6903 gen_op_movl_T0_im(selector);
6904 gen_op_movl_T1_imu(offset);
6905 }
6906 goto do_lcall;
6907 case 0xe9: /* jmp im */
6908 if (dflag)
6909 tval = (int32_t)insn_get(s, OT_LONG);
6910 else
6911 tval = (int16_t)insn_get(s, OT_WORD);
6912 tval += s->pc - s->cs_base;
6913 if (s->dflag == 0)
6914 tval &= 0xffff;
6915 else if(!CODE64(s))
6916 tval &= 0xffffffff;
6917 gen_jmp(s, tval);
6918 break;
6919 case 0xea: /* ljmp im */
6920 {
6921 unsigned int selector, offset;
6922
6923 if (CODE64(s))
6924 goto illegal_op;
6925 ot = dflag ? OT_LONG : OT_WORD;
6926 offset = insn_get(s, ot);
6927 selector = insn_get(s, OT_WORD);
6928
6929 gen_op_movl_T0_im(selector);
6930 gen_op_movl_T1_imu(offset);
6931 }
6932 goto do_ljmp;
6933 case 0xeb: /* jmp Jb */
6934 tval = (int8_t)insn_get(s, OT_BYTE);
6935 tval += s->pc - s->cs_base;
6936 if (s->dflag == 0)
6937 tval &= 0xffff;
6938 gen_jmp(s, tval);
6939 break;
6940 case 0x70 ... 0x7f: /* jcc Jb */
6941 tval = (int8_t)insn_get(s, OT_BYTE);
6942 goto do_jcc;
6943 case 0x180 ... 0x18f: /* jcc Jv */
6944 if (dflag) {
6945 tval = (int32_t)insn_get(s, OT_LONG);
6946 } else {
6947 tval = (int16_t)insn_get(s, OT_WORD);
6948 }
6949 do_jcc:
6950 next_eip = s->pc - s->cs_base;
6951 tval += next_eip;
6952 if (s->dflag == 0)
6953 tval &= 0xffff;
6954 gen_jcc(s, b, tval, next_eip);
6955 break;
6956
6957 case 0x190 ... 0x19f: /* setcc Gv */
6958 modrm = ldub_code(s->pc++);
6959 gen_setcc(s, b);
6960 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6961 break;
6962 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6963 {
6964 int l1;
6965 TCGv t0;
6966
6967 ot = dflag + OT_WORD;
6968 modrm = ldub_code(s->pc++);
6969 reg = ((modrm >> 3) & 7) | rex_r;
6970 mod = (modrm >> 6) & 3;
6971 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6972 if (mod != 3) {
6973 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6974 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6975 } else {
6976 rm = (modrm & 7) | REX_B(s);
6977 gen_op_mov_v_reg(ot, t0, rm);
6978 }
6979#ifdef TARGET_X86_64
6980 if (ot == OT_LONG) {
6981 /* XXX: specific Intel behaviour ? */
6982 l1 = gen_new_label();
6983 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6984 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6985 gen_set_label(l1);
6986 tcg_gen_movi_tl(cpu_tmp0, 0);
6987 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6988 } else
6989#endif
6990 {
6991 l1 = gen_new_label();
6992 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6993 gen_op_mov_reg_v(ot, reg, t0);
6994 gen_set_label(l1);
6995 }
6996 tcg_temp_free(t0);
6997 }
6998 break;
6999
7000 /************************/
7001 /* flags */
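/* pushf/popf: EFLAGS is materialized via helpers. popf picks the
   writable mask from the privilege level: ring 0 may also change
   IOPL, and IF is writable only when CPL <= IOPL. */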
7002 case 0x9c: /* pushf */
7003 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
7004#ifdef VBOX
7005 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7006#else
7007 if (s->vm86 && s->iopl != 3) {
7008#endif
7009 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7010 } else {
7011 if (s->cc_op != CC_OP_DYNAMIC)
7012 gen_op_set_cc_op(s->cc_op);
7013#ifdef VBOX
7014 if (s->vm86 && s->vme && s->iopl != 3)
7015 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
7016 else
7017#endif
7018 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
7019 gen_push_T0(s);
7020 }
7021 break;
7022 case 0x9d: /* popf */
7023 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
7024#ifdef VBOX
7025 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7026#else
7027 if (s->vm86 && s->iopl != 3) {
7028#endif
7029 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7030 } else {
7031 gen_pop_T0(s);
7032 if (s->cpl == 0) {
7033 if (s->dflag) {
7034 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7035 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7036 } else {
7037 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7038 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7039 }
7040 } else {
7041 if (s->cpl <= s->iopl) {
7042 if (s->dflag) {
7043 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7044 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7045 } else {
7046 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7047 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7048 }
7049 } else {
7050 if (s->dflag) {
7051 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7052 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7053 } else {
7054#ifdef VBOX
7055 if (s->vm86 && s->vme)
7056 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7057 else
7058#endif
7059 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7060 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7061 }
7062 }
7063 }
7064 gen_pop_update(s);
7065 s->cc_op = CC_OP_EFLAGS;
7066 /* abort translation because TF flag may change */
7067 gen_jmp_im(s->pc - s->cs_base);
7068 gen_eob(s);
7069 }
7070 break;
7071 case 0x9e: /* sahf */
7072 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7073 goto illegal_op;
7074 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7075 if (s->cc_op != CC_OP_DYNAMIC)
7076 gen_op_set_cc_op(s->cc_op);
7077 gen_compute_eflags(cpu_cc_src);
7078 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7079 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7080 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7081 s->cc_op = CC_OP_EFLAGS;
7082 break;
7083 case 0x9f: /* lahf */
7084 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7085 goto illegal_op;
7086 if (s->cc_op != CC_OP_DYNAMIC)
7087 gen_op_set_cc_op(s->cc_op);
7088 gen_compute_eflags(cpu_T[0]);
7089 /* Note: gen_compute_eflags() only gives the condition codes */
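/* bit 1 of EFLAGS always reads as 1, hence the 0x02 or'ed in */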
7090 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7091 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7092 break;
7093 case 0xf5: /* cmc */
7094 if (s->cc_op != CC_OP_DYNAMIC)
7095 gen_op_set_cc_op(s->cc_op);
7096 gen_compute_eflags(cpu_cc_src);
7097 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7098 s->cc_op = CC_OP_EFLAGS;
7099 break;
7100 case 0xf8: /* clc */
7101 if (s->cc_op != CC_OP_DYNAMIC)
7102 gen_op_set_cc_op(s->cc_op);
7103 gen_compute_eflags(cpu_cc_src);
7104 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7105 s->cc_op = CC_OP_EFLAGS;
7106 break;
7107 case 0xf9: /* stc */
7108 if (s->cc_op != CC_OP_DYNAMIC)
7109 gen_op_set_cc_op(s->cc_op);
7110 gen_compute_eflags(cpu_cc_src);
7111 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7112 s->cc_op = CC_OP_EFLAGS;
7113 break;
7114 case 0xfc: /* cld */
7115 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7116 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7117 break;
7118 case 0xfd: /* std */
7119 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7120 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7121 break;
7122
7123 /************************/
7124 /* bit operations */
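/* bt/bts/btr/btc: 0f ba is the immediate-count form (reg field
   values 4-7 select the operation); 0f a3/ab/b3/bb take the bit
   index from a register. Both converge on bt_op with the index
   in T1. */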
7125 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7126 ot = dflag + OT_WORD;
7127 modrm = ldub_code(s->pc++);
7128 op = (modrm >> 3) & 7;
7129 mod = (modrm >> 6) & 3;
7130 rm = (modrm & 7) | REX_B(s);
7131 if (mod != 3) {
7132 s->rip_offset = 1;
7133 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7134 gen_op_ld_T0_A0(ot + s->mem_index);
7135 } else {
7136 gen_op_mov_TN_reg(ot, 0, rm);
7137 }
7138 /* load shift */
7139 val = ldub_code(s->pc++);
7140 gen_op_movl_T1_im(val);
7141 if (op < 4)
7142 goto illegal_op;
7143 op -= 4;
7144 goto bt_op;
7145 case 0x1a3: /* bt Gv, Ev */
7146 op = 0;
7147 goto do_btx;
7148 case 0x1ab: /* bts */
7149 op = 1;
7150 goto do_btx;
7151 case 0x1b3: /* btr */
7152 op = 2;
7153 goto do_btx;
7154 case 0x1bb: /* btc */
7155 op = 3;
7156 do_btx:
7157 ot = dflag + OT_WORD;
7158 modrm = ldub_code(s->pc++);
7159 reg = ((modrm >> 3) & 7) | rex_r;
7160 mod = (modrm >> 6) & 3;
7161 rm = (modrm & 7) | REX_B(s);
7162 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7163 if (mod != 3) {
7164 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7165 /* memory form: the bit index selects a word relative to the base, so add the scaled displacement */
7166 gen_exts(ot, cpu_T[1]);
7167 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7168 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7169 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7170 gen_op_ld_T0_A0(ot + s->mem_index);
7171 } else {
7172 gen_op_mov_TN_reg(ot, 0, rm);
7173 }
7174 bt_op:
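/* reduce the bit index modulo the operand width in bits */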
7175 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7176 switch(op) {
7177 case 0:
7178 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7179 tcg_gen_movi_tl(cpu_cc_dst, 0);
7180 break;
7181 case 1:
7182 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7183 tcg_gen_movi_tl(cpu_tmp0, 1);
7184 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7185 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7186 break;
7187 case 2:
7188 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7189 tcg_gen_movi_tl(cpu_tmp0, 1);
7190 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7191 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7192 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7193 break;
7194 default:
7195 case 3:
7196 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7197 tcg_gen_movi_tl(cpu_tmp0, 1);
7198 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7199 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7200 break;
7201 }
7202 s->cc_op = CC_OP_SARB + ot;
7203 if (op != 0) {
7204 if (mod != 3)
7205 gen_op_st_T0_A0(ot + s->mem_index);
7206 else
7207 gen_op_mov_reg_T0(ot, rm);
7208 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7209 tcg_gen_movi_tl(cpu_cc_dst, 0);
7210 }
7211 break;
7212 case 0x1bc: /* bsf */
7213 case 0x1bd: /* bsr */
7214 {
7215 int label1;
7216 TCGv t0;
7217
7218 ot = dflag + OT_WORD;
7219 modrm = ldub_code(s->pc++);
7220 reg = ((modrm >> 3) & 7) | rex_r;
7221 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7222 gen_extu(ot, cpu_T[0]);
7223 label1 = gen_new_label();
7224 tcg_gen_movi_tl(cpu_cc_dst, 0);
7225 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7226 tcg_gen_mov_tl(t0, cpu_T[0]);
7227 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7228 if (b & 1) {
7229 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7230 } else {
7231 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7232 }
7233 gen_op_mov_reg_T0(ot, reg);
7234 tcg_gen_movi_tl(cpu_cc_dst, 1);
7235 gen_set_label(label1);
7236 tcg_gen_discard_tl(cpu_cc_src);
7237 s->cc_op = CC_OP_LOGICB + ot;
7238 tcg_temp_free(t0);
7239 }
7240 break;
7241 /************************/
7242 /* bcd */
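/* ASCII/BCD adjust instructions; all are invalid in 64-bit mode,
   hence the CODE64 checks. aam with a zero immediate raises #DE. */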
7243 case 0x27: /* daa */
7244 if (CODE64(s))
7245 goto illegal_op;
7246 if (s->cc_op != CC_OP_DYNAMIC)
7247 gen_op_set_cc_op(s->cc_op);
7248 tcg_gen_helper_0_0(helper_daa);
7249 s->cc_op = CC_OP_EFLAGS;
7250 break;
7251 case 0x2f: /* das */
7252 if (CODE64(s))
7253 goto illegal_op;
7254 if (s->cc_op != CC_OP_DYNAMIC)
7255 gen_op_set_cc_op(s->cc_op);
7256 tcg_gen_helper_0_0(helper_das);
7257 s->cc_op = CC_OP_EFLAGS;
7258 break;
7259 case 0x37: /* aaa */
7260 if (CODE64(s))
7261 goto illegal_op;
7262 if (s->cc_op != CC_OP_DYNAMIC)
7263 gen_op_set_cc_op(s->cc_op);
7264 tcg_gen_helper_0_0(helper_aaa);
7265 s->cc_op = CC_OP_EFLAGS;
7266 break;
7267 case 0x3f: /* aas */
7268 if (CODE64(s))
7269 goto illegal_op;
7270 if (s->cc_op != CC_OP_DYNAMIC)
7271 gen_op_set_cc_op(s->cc_op);
7272 tcg_gen_helper_0_0(helper_aas);
7273 s->cc_op = CC_OP_EFLAGS;
7274 break;
7275 case 0xd4: /* aam */
7276 if (CODE64(s))
7277 goto illegal_op;
7278 val = ldub_code(s->pc++);
7279 if (val == 0) {
7280 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7281 } else {
7282 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7283 s->cc_op = CC_OP_LOGICB;
7284 }
7285 break;
7286 case 0xd5: /* aad */
7287 if (CODE64(s))
7288 goto illegal_op;
7289 val = ldub_code(s->pc++);
7290 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7291 s->cc_op = CC_OP_LOGICB;
7292 break;
7293 /************************/
7294 /* misc */
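/* plain 0x90 is nop; with a rep prefix it is pause (the SVM pause
   intercept is checked). With REX.B it would be xchg r8, rax,
   which is not handled here (see the XXX note below). */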
7295 case 0x90: /* nop */
7296 /* XXX: xchg + rex handling */
7297 /* XXX: correct lock test for all insn */
7298 if (prefixes & PREFIX_LOCK)
7299 goto illegal_op;
7300 if (prefixes & PREFIX_REPZ) {
7301 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7302 }
7303 break;
7304 case 0x9b: /* fwait */
7305 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7306 (HF_MP_MASK | HF_TS_MASK)) {
7307 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7308 } else {
7309 if (s->cc_op != CC_OP_DYNAMIC)
7310 gen_op_set_cc_op(s->cc_op);
7311 gen_jmp_im(pc_start - s->cs_base);
7312 tcg_gen_helper_0_0(helper_fwait);
7313 }
7314 break;
7315 case 0xcc: /* int3 */
7316#ifdef VBOX
7317 if (s->vm86 && s->iopl != 3 && !s->vme) {
7318 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7319 } else
7320#endif
7321 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7322 break;
7323 case 0xcd: /* int N */
7324 val = ldub_code(s->pc++);
7325#ifdef VBOX
7326 if (s->vm86 && s->iopl != 3 && !s->vme) {
7327#else
7328 if (s->vm86 && s->iopl != 3) {
7329#endif
7330 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7331 } else {
7332 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7333 }
7334 break;
7335 case 0xce: /* into */
7336 if (CODE64(s))
7337 goto illegal_op;
7338 if (s->cc_op != CC_OP_DYNAMIC)
7339 gen_op_set_cc_op(s->cc_op);
7340 gen_jmp_im(pc_start - s->cs_base);
7341 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7342 break;
7343 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7344 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7345#if 1
7346 gen_debug(s, pc_start - s->cs_base);
7347#else
7348 /* start debug */
7349 tb_flush(cpu_single_env);
7350 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7351#endif
7352 break;
7353 case 0xfa: /* cli */
7354 if (!s->vm86) {
7355 if (s->cpl <= s->iopl) {
7356 tcg_gen_helper_0_0(helper_cli);
7357 } else {
7358 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7359 }
7360 } else {
7361 if (s->iopl == 3) {
7362 tcg_gen_helper_0_0(helper_cli);
7363#ifdef VBOX
7364 } else if (s->iopl != 3 && s->vme) {
7365 tcg_gen_helper_0_0(helper_cli_vme);
7366#endif
7367 } else {
7368 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7369 }
7370 }
7371 break;
7372 case 0xfb: /* sti */
7373 if (!s->vm86) {
7374 if (s->cpl <= s->iopl) {
7375 gen_sti:
7376 tcg_gen_helper_0_0(helper_sti);
7377 /* interrupts are enabled only after the first insn following sti */
7378 /* if several consecutive insns inhibit interrupts, only the
7379 _first_ sets the inhibit flag */
7380 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7381 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7382 /* give a chance to handle pending irqs */
7383 gen_jmp_im(s->pc - s->cs_base);
7384 gen_eob(s);
7385 } else {
7386 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7387 }
7388 } else {
7389 if (s->iopl == 3) {
7390 goto gen_sti;
7391#ifdef VBOX
7392 } else if (s->iopl != 3 && s->vme) {
7393 tcg_gen_helper_0_0(helper_sti_vme);
7394 /* give a chance to handle pending irqs */
7395 gen_jmp_im(s->pc - s->cs_base);
7396 gen_eob(s);
7397#endif
7398 } else {
7399 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7400 }
7401 }
7402 break;
7403 case 0x62: /* bound */
7404 if (CODE64(s))
7405 goto illegal_op;
7406 ot = dflag ? OT_LONG : OT_WORD;
7407 modrm = ldub_code(s->pc++);
7408 reg = (modrm >> 3) & 7;
7409 mod = (modrm >> 6) & 3;
7410 if (mod == 3)
7411 goto illegal_op;
7412 gen_op_mov_TN_reg(ot, 0, reg);
7413 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7414 gen_jmp_im(pc_start - s->cs_base);
7415 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7416 if (ot == OT_WORD)
7417 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7418 else
7419 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7420 break;
7421 case 0x1c8 ... 0x1cf: /* bswap reg */
7422 reg = (b & 7) | REX_B(s);
7423#ifdef TARGET_X86_64
7424 if (dflag == 2) {
7425 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7426 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7427 gen_op_mov_reg_T0(OT_QUAD, reg);
7428 } else
7429 {
7430 TCGv tmp0;
7431 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7432
7433 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7434 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7435 tcg_gen_bswap_i32(tmp0, tmp0);
7436 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7437 gen_op_mov_reg_T0(OT_LONG, reg);
7438 }
7439#else
7440 {
7441 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7442 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7443 gen_op_mov_reg_T0(OT_LONG, reg);
7444 }
7445#endif
7446 break;
7447 case 0xd6: /* salc */
7448 if (CODE64(s))
7449 goto illegal_op;
7450 if (s->cc_op != CC_OP_DYNAMIC)
7451 gen_op_set_cc_op(s->cc_op);
7452 gen_compute_eflags_c(cpu_T[0]);
7453 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7454 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7455 break;
7456 case 0xe0: /* loopnz */
7457 case 0xe1: /* loopz */
7458 case 0xe2: /* loop */
7459 case 0xe3: /* jecxz */
7460 {
7461 int l1, l2, l3;
7462
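/* l1: branch taken (jump to tval), l3: branch not taken (fall
   through to next_eip), l2: merge point before ending the block */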
7463 tval = (int8_t)insn_get(s, OT_BYTE);
7464 next_eip = s->pc - s->cs_base;
7465 tval += next_eip;
7466 if (s->dflag == 0)
7467 tval &= 0xffff;
7468
7469 l1 = gen_new_label();
7470 l2 = gen_new_label();
7471 l3 = gen_new_label();
7472 b &= 3;
7473 switch(b) {
7474 case 0: /* loopnz */
7475 case 1: /* loopz */
7476 if (s->cc_op != CC_OP_DYNAMIC)
7477 gen_op_set_cc_op(s->cc_op);
7478 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7479 gen_op_jz_ecx(s->aflag, l3);
7480 gen_compute_eflags(cpu_tmp0);
7481 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7482 if (b == 0) {
7483 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7484 } else {
7485 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7486 }
7487 break;
7488 case 2: /* loop */
7489 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7490 gen_op_jnz_ecx(s->aflag, l1);
7491 break;
7492 default:
7493 case 3: /* jcxz */
7494 gen_op_jz_ecx(s->aflag, l1);
7495 break;
7496 }
7497
7498 gen_set_label(l3);
7499 gen_jmp_im(next_eip);
7500 tcg_gen_br(l2);
7501
7502 gen_set_label(l1);
7503 gen_jmp_im(tval);
7504 gen_set_label(l2);
7505 gen_eob(s);
7506 }
7507 break;
7508 case 0x130: /* wrmsr */
7509 case 0x132: /* rdmsr */
7510 if (s->cpl != 0) {
7511 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7512 } else {
7513 if (s->cc_op != CC_OP_DYNAMIC)
7514 gen_op_set_cc_op(s->cc_op);
7515 gen_jmp_im(pc_start - s->cs_base);
7516 if (b & 2) {
7517 tcg_gen_helper_0_0(helper_rdmsr);
7518 } else {
7519 tcg_gen_helper_0_0(helper_wrmsr);
7520 }
7521 }
7522 break;
7523 case 0x131: /* rdtsc */
7524 if (s->cc_op != CC_OP_DYNAMIC)
7525 gen_op_set_cc_op(s->cc_op);
7526 gen_jmp_im(pc_start - s->cs_base);
7527 if (use_icount)
7528 gen_io_start();
7529 tcg_gen_helper_0_0(helper_rdtsc);
7530 if (use_icount) {
7531 gen_io_end();
7532 gen_jmp(s, s->pc - s->cs_base);
7533 }
7534 break;
7535 case 0x133: /* rdpmc */
7536 if (s->cc_op != CC_OP_DYNAMIC)
7537 gen_op_set_cc_op(s->cc_op);
7538 gen_jmp_im(pc_start - s->cs_base);
7539 tcg_gen_helper_0_0(helper_rdpmc);
7540 break;
7541 case 0x134: /* sysenter */
7542#ifndef VBOX
7543 /* on Intel CPUs, SYSENTER is valid in 64-bit mode */
7544 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7545#else
7546 /** @todo: make things right */
7547 if (CODE64(s))
7548#endif
7549 goto illegal_op;
7550 if (!s->pe) {
7551 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7552 } else {
7553 if (s->cc_op != CC_OP_DYNAMIC) {
7554 gen_op_set_cc_op(s->cc_op);
7555 s->cc_op = CC_OP_DYNAMIC;
7556 }
7557 gen_jmp_im(pc_start - s->cs_base);
7558 tcg_gen_helper_0_0(helper_sysenter);
7559 gen_eob(s);
7560 }
7561 break;
7562 case 0x135: /* sysexit */
7563#ifndef VBOX
7564 /* on Intel CPUs, SYSEXIT is valid in 64-bit mode */
7565 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7566#else
7567 /** @todo: make things right */
7568 if (CODE64(s))
7569#endif
7570 goto illegal_op;
7571 if (!s->pe) {
7572 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7573 } else {
7574 if (s->cc_op != CC_OP_DYNAMIC) {
7575 gen_op_set_cc_op(s->cc_op);
7576 s->cc_op = CC_OP_DYNAMIC;
7577 }
7578 gen_jmp_im(pc_start - s->cs_base);
7579 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7580 gen_eob(s);
7581 }
7582 break;
7583#ifdef TARGET_X86_64
7584 case 0x105: /* syscall */
7585 /* XXX: is it usable in real mode ? */
7586 if (s->cc_op != CC_OP_DYNAMIC) {
7587 gen_op_set_cc_op(s->cc_op);
7588 s->cc_op = CC_OP_DYNAMIC;
7589 }
7590 gen_jmp_im(pc_start - s->cs_base);
7591 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7592 gen_eob(s);
7593 break;
7594 case 0x107: /* sysret */
7595 if (!s->pe) {
7596 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7597 } else {
7598 if (s->cc_op != CC_OP_DYNAMIC) {
7599 gen_op_set_cc_op(s->cc_op);
7600 s->cc_op = CC_OP_DYNAMIC;
7601 }
7602 gen_jmp_im(pc_start - s->cs_base);
7603 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7604 /* condition codes are modified only in long mode */
7605 if (s->lma)
7606 s->cc_op = CC_OP_EFLAGS;
7607 gen_eob(s);
7608 }
7609 break;
7610#endif
7611 case 0x1a2: /* cpuid */
7612 if (s->cc_op != CC_OP_DYNAMIC)
7613 gen_op_set_cc_op(s->cc_op);
7614 gen_jmp_im(pc_start - s->cs_base);
7615 tcg_gen_helper_0_0(helper_cpuid);
7616 break;
7617 case 0xf4: /* hlt */
7618 if (s->cpl != 0) {
7619 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7620 } else {
7621 if (s->cc_op != CC_OP_DYNAMIC)
7622 gen_op_set_cc_op(s->cc_op);
7623 gen_jmp_im(pc_start - s->cs_base);
7624 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7625 s->is_jmp = 3;
7626 }
7627 break;
7628 case 0x100:
7629 modrm = ldub_code(s->pc++);
7630 mod = (modrm >> 6) & 3;
7631 op = (modrm >> 3) & 7;
7632 switch(op) {
7633 case 0: /* sldt */
7634 if (!s->pe || s->vm86)
7635 goto illegal_op;
7636 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7637 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7638 ot = OT_WORD;
7639 if (mod == 3)
7640 ot += s->dflag;
7641 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7642 break;
7643 case 2: /* lldt */
7644 if (!s->pe || s->vm86)
7645 goto illegal_op;
7646 if (s->cpl != 0) {
7647 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7648 } else {
7649 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7650 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7651 gen_jmp_im(pc_start - s->cs_base);
7652 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7653 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7654 }
7655 break;
7656 case 1: /* str */
7657 if (!s->pe || s->vm86)
7658 goto illegal_op;
7659 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7660 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7661 ot = OT_WORD;
7662 if (mod == 3)
7663 ot += s->dflag;
7664 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7665 break;
7666 case 3: /* ltr */
7667 if (!s->pe || s->vm86)
7668 goto illegal_op;
7669 if (s->cpl != 0) {
7670 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7671 } else {
7672 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7673 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7674 gen_jmp_im(pc_start - s->cs_base);
7675 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7676 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7677 }
7678 break;
7679 case 4: /* verr */
7680 case 5: /* verw */
7681 if (!s->pe || s->vm86)
7682 goto illegal_op;
7683 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7684 if (s->cc_op != CC_OP_DYNAMIC)
7685 gen_op_set_cc_op(s->cc_op);
7686 if (op == 4)
7687 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7688 else
7689 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7690 s->cc_op = CC_OP_EFLAGS;
7691 break;
7692 default:
7693 goto illegal_op;
7694 }
7695 break;
7696 case 0x101:
7697 modrm = ldub_code(s->pc++);
7698 mod = (modrm >> 6) & 3;
7699 op = (modrm >> 3) & 7;
7700 rm = modrm & 7;
7701
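/* group 7 (0f 01): sgdt/sidt/lgdt/lidt/smsw/lmsw/invlpg; the
   mod == 3 encodings carry monitor/mwait, the SVM instructions,
   swapgs and, in the VBox build, rdtscp (0f 01 f9). */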
7702#ifdef VBOX
7703 /* 0f 01 f9: rdtscp */
7704 if (modrm == 0xf9)
7705 {
7706 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7707 goto illegal_op;
7708 gen_jmp_im(pc_start - s->cs_base);
7709 tcg_gen_helper_0_0(helper_rdtscp);
7710 break;
7711 }
7712#endif
7713 switch(op) {
7714 case 0: /* sgdt */
7715 if (mod == 3)
7716 goto illegal_op;
7717 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7718 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7719 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7720 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7721 gen_add_A0_im(s, 2);
7722 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7723 if (!s->dflag)
7724 gen_op_andl_T0_im(0xffffff);
7725 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7726 break;
7727 case 1:
7728 if (mod == 3) {
7729 switch (rm) {
7730 case 0: /* monitor */
7731 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7732 s->cpl != 0)
7733 goto illegal_op;
7734 if (s->cc_op != CC_OP_DYNAMIC)
7735 gen_op_set_cc_op(s->cc_op);
7736 gen_jmp_im(pc_start - s->cs_base);
7737#ifdef TARGET_X86_64
7738 if (s->aflag == 2) {
7739 gen_op_movq_A0_reg(R_EAX);
7740 } else
7741#endif
7742 {
7743 gen_op_movl_A0_reg(R_EAX);
7744 if (s->aflag == 0)
7745 gen_op_andl_A0_ffff();
7746 }
7747 gen_add_A0_ds_seg(s);
7748 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7749 break;
7750 case 1: /* mwait */
7751 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7752 s->cpl != 0)
7753 goto illegal_op;
7754 if (s->cc_op != CC_OP_DYNAMIC) {
7755 gen_op_set_cc_op(s->cc_op);
7756 s->cc_op = CC_OP_DYNAMIC;
7757 }
7758 gen_jmp_im(pc_start - s->cs_base);
7759 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7760 gen_eob(s);
7761 break;
7762 default:
7763 goto illegal_op;
7764 }
7765 } else { /* sidt */
7766 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7767 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7768 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7769 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7770 gen_add_A0_im(s, 2);
7771 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7772 if (!s->dflag)
7773 gen_op_andl_T0_im(0xffffff);
7774 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7775 }
7776 break;
7777 case 2: /* lgdt */
7778 case 3: /* lidt */
7779 if (mod == 3) {
7780 if (s->cc_op != CC_OP_DYNAMIC)
7781 gen_op_set_cc_op(s->cc_op);
7782 gen_jmp_im(pc_start - s->cs_base);
7783 switch(rm) {
7784 case 0: /* VMRUN */
7785 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7786 goto illegal_op;
7787 if (s->cpl != 0) {
7788 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7789 break;
7790 } else {
7791 tcg_gen_helper_0_2(helper_vmrun,
7792 tcg_const_i32(s->aflag),
7793 tcg_const_i32(s->pc - pc_start));
7794 tcg_gen_exit_tb(0);
7795 s->is_jmp = 3;
7796 }
7797 break;
7798 case 1: /* VMMCALL */
7799 if (!(s->flags & HF_SVME_MASK))
7800 goto illegal_op;
7801 tcg_gen_helper_0_0(helper_vmmcall);
7802 break;
7803 case 2: /* VMLOAD */
7804 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7805 goto illegal_op;
7806 if (s->cpl != 0) {
7807 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7808 break;
7809 } else {
7810 tcg_gen_helper_0_1(helper_vmload,
7811 tcg_const_i32(s->aflag));
7812 }
7813 break;
7814 case 3: /* VMSAVE */
7815 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7816 goto illegal_op;
7817 if (s->cpl != 0) {
7818 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7819 break;
7820 } else {
7821 tcg_gen_helper_0_1(helper_vmsave,
7822 tcg_const_i32(s->aflag));
7823 }
7824 break;
7825 case 4: /* STGI */
7826 if ((!(s->flags & HF_SVME_MASK) &&
7827 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7828 !s->pe)
7829 goto illegal_op;
7830 if (s->cpl != 0) {
7831 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7832 break;
7833 } else {
7834 tcg_gen_helper_0_0(helper_stgi);
7835 }
7836 break;
7837 case 5: /* CLGI */
7838 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7839 goto illegal_op;
7840 if (s->cpl != 0) {
7841 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7842 break;
7843 } else {
7844 tcg_gen_helper_0_0(helper_clgi);
7845 }
7846 break;
7847 case 6: /* SKINIT */
7848 if ((!(s->flags & HF_SVME_MASK) &&
7849 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7850 !s->pe)
7851 goto illegal_op;
7852 tcg_gen_helper_0_0(helper_skinit);
7853 break;
7854 case 7: /* INVLPGA */
7855 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7856 goto illegal_op;
7857 if (s->cpl != 0) {
7858 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7859 break;
7860 } else {
7861 tcg_gen_helper_0_1(helper_invlpga,
7862 tcg_const_i32(s->aflag));
7863 }
7864 break;
7865 default:
7866 goto illegal_op;
7867 }
7868 } else if (s->cpl != 0) {
7869 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7870 } else {
7871 gen_svm_check_intercept(s, pc_start,
7872 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7873 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7874 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7875 gen_add_A0_im(s, 2);
7876 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7877 if (!s->dflag)
7878 gen_op_andl_T0_im(0xffffff);
7879 if (op == 2) {
7880 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7881 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7882 } else {
7883 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7884 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7885 }
7886 }
7887 break;
7888 case 4: /* smsw */
7889 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7890 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7891 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7892 break;
7893 case 6: /* lmsw */
7894 if (s->cpl != 0) {
7895 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7896 } else {
7897 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7898 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7899 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7900 gen_jmp_im(s->pc - s->cs_base);
7901 gen_eob(s);
7902 }
7903 break;
7904 case 7: /* invlpg */
7905 if (s->cpl != 0) {
7906 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7907 } else {
7908 if (mod == 3) {
7909#ifdef TARGET_X86_64
7910 if (CODE64(s) && rm == 0) {
7911 /* swapgs */
7912 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7913 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7914 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7915 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7916 } else
7917#endif
7918 {
7919 goto illegal_op;
7920 }
7921 } else {
7922 if (s->cc_op != CC_OP_DYNAMIC)
7923 gen_op_set_cc_op(s->cc_op);
7924 gen_jmp_im(pc_start - s->cs_base);
7925 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7926 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7927 gen_jmp_im(s->pc - s->cs_base);
7928 gen_eob(s);
7929 }
7930 }
7931 break;
7932 default:
7933 goto illegal_op;
7934 }
7935 break;
7936 case 0x108: /* invd */
7937 case 0x109: /* wbinvd */
7938 if (s->cpl != 0) {
7939 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7940 } else {
7941 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7942 /* nothing to do */
7943 }
7944 break;
7945 case 0x63: /* arpl or movslS (x86_64) */
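/* in 64-bit mode 0x63 is movsxd (sign-extend r/m32 into a 64-bit
   register); otherwise it is the protected-mode arpl */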
7946#ifdef TARGET_X86_64
7947 if (CODE64(s)) {
7948 int d_ot;
7949 /* d_ot is the size of the destination */
7950 d_ot = dflag + OT_WORD;
7951
7952 modrm = ldub_code(s->pc++);
7953 reg = ((modrm >> 3) & 7) | rex_r;
7954 mod = (modrm >> 6) & 3;
7955 rm = (modrm & 7) | REX_B(s);
7956
7957 if (mod == 3) {
7958 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7959 /* sign extend */
7960 if (d_ot == OT_QUAD)
7961 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7962 gen_op_mov_reg_T0(d_ot, reg);
7963 } else {
7964 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7965 if (d_ot == OT_QUAD) {
7966 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7967 } else {
7968 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7969 }
7970 gen_op_mov_reg_T0(d_ot, reg);
7971 }
7972 } else
7973#endif
7974 {
7975 int label1;
7976 TCGv t0, t1, t2, a0;
7977
7978 if (!s->pe || s->vm86)
7979 goto illegal_op;
7980
7981 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7982 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7983 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7984#ifdef VBOX
7985 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7986#endif
7987 ot = OT_WORD;
7988 modrm = ldub_code(s->pc++);
7989 reg = (modrm >> 3) & 7;
7990 mod = (modrm >> 6) & 3;
7991 rm = modrm & 7;
7992 if (mod != 3) {
7993 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7994#ifdef VBOX
7995 tcg_gen_mov_tl(a0, cpu_A0);
7996#endif
7997 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7998 } else {
7999 gen_op_mov_v_reg(ot, t0, rm);
8000 }
8001 gen_op_mov_v_reg(ot, t1, reg);
8002 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
8003 tcg_gen_andi_tl(t1, t1, 3);
8004 tcg_gen_movi_tl(t2, 0);
8005 label1 = gen_new_label();
8006 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
8007 tcg_gen_andi_tl(t0, t0, ~3);
8008 tcg_gen_or_tl(t0, t0, t1);
8009 tcg_gen_movi_tl(t2, CC_Z);
8010 gen_set_label(label1);
8011 if (mod != 3) {
8012#ifdef VBOX
8013 /* cpu_A0 does not survive the branch; use the copy saved in a0 */
8014 gen_op_st_v(ot + s->mem_index, t0, a0);
8015#else
8016 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
8017#endif
8018 } else {
8019 gen_op_mov_reg_v(ot, rm, t0);
8020 }
8021 if (s->cc_op != CC_OP_DYNAMIC)
8022 gen_op_set_cc_op(s->cc_op);
8023 gen_compute_eflags(cpu_cc_src);
8024 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
8025 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
8026 s->cc_op = CC_OP_EFLAGS;
8027 tcg_temp_free(t0);
8028 tcg_temp_free(t1);
8029 tcg_temp_free(t2);
8030#ifdef VBOX
8031 tcg_temp_free(a0);
8032#endif
8033 }
8034 break;
8035 case 0x102: /* lar */
8036 case 0x103: /* lsl */
8037 {
8038 int label1;
8039 TCGv t0;
8040 if (!s->pe || s->vm86)
8041 goto illegal_op;
8042 ot = dflag ? OT_LONG : OT_WORD;
8043 modrm = ldub_code(s->pc++);
8044 reg = ((modrm >> 3) & 7) | rex_r;
8045 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8046 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8047 if (s->cc_op != CC_OP_DYNAMIC)
8048 gen_op_set_cc_op(s->cc_op);
8049 if (b == 0x102)
8050 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8051 else
8052 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8053 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8054 label1 = gen_new_label();
8055 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8056 gen_op_mov_reg_v(ot, reg, t0);
8057 gen_set_label(label1);
8058 s->cc_op = CC_OP_EFLAGS;
8059 tcg_temp_free(t0);
8060 }
8061 break;
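/* Shape of the LAR/LSL pattern above, as hedged pseudocode: the helper
   validates the selector and records CC_Z in cpu_cc_src; the branch on
   (cpu_cc_src & CC_Z) == 0 skips the write-back, so the destination
   register is only modified when the check succeeded (ZF set):
       t0 = helper_lar_or_lsl(selector);   // also updates ZF
       if (eflags & CC_Z)
           regs[reg] = t0;                 // commit only on success
*/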
8062 case 0x118:
8063 modrm = ldub_code(s->pc++);
8064 mod = (modrm >> 6) & 3;
8065 op = (modrm >> 3) & 7;
8066 switch(op) {
8067 case 0: /* prefetchnta */
8068 case 1: /* prefetcht0 */
8069 case 2: /* prefetcht1 */
8070 case 3: /* prefetcht2 */
8071 if (mod == 3)
8072 goto illegal_op;
8073 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8074 /* nothing more to do */
8075 break;
8076 default: /* nop (multi byte) */
8077 gen_nop_modrm(s, modrm);
8078 break;
8079 }
8080 break;
8081 case 0x119 ... 0x11f: /* nop (multi byte) */
8082 modrm = ldub_code(s->pc++);
8083 gen_nop_modrm(s, modrm);
8084 break;
8085 case 0x120: /* mov reg, crN */
8086 case 0x122: /* mov crN, reg */
8087 if (s->cpl != 0) {
8088 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8089 } else {
8090 modrm = ldub_code(s->pc++);
8091#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8092 if ((modrm & 0xc0) != 0xc0)
8093 goto illegal_op;
8094#endif
8095 rm = (modrm & 7) | REX_B(s);
8096 reg = ((modrm >> 3) & 7) | rex_r;
8097 if (CODE64(s))
8098 ot = OT_QUAD;
8099 else
8100 ot = OT_LONG;
8101 switch(reg) {
8102 case 0:
8103 case 2:
8104 case 3:
8105 case 4:
8106 case 8:
8107 if (s->cc_op != CC_OP_DYNAMIC)
8108 gen_op_set_cc_op(s->cc_op);
8109 gen_jmp_im(pc_start - s->cs_base);
8110 if (b & 2) {
8111 gen_op_mov_TN_reg(ot, 0, rm);
8112 tcg_gen_helper_0_2(helper_write_crN,
8113 tcg_const_i32(reg), cpu_T[0]);
8114 gen_jmp_im(s->pc - s->cs_base);
8115 gen_eob(s);
8116 } else {
8117 tcg_gen_helper_1_1(helper_read_crN,
8118 cpu_T[0], tcg_const_i32(reg));
8119 gen_op_mov_reg_T0(ot, rm);
8120 }
8121 break;
8122 default:
8123 goto illegal_op;
8124 }
8125 }
8126 break;
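/* Note on the #ifndef VBOX check above: for MOV to/from CRn the AMD
   manuals state that the mod field of the ModRM byte is ignored and
   always treated as 11b, so VBOX accepts any mod value while upstream
   rejects (modrm & 0xc0) != 0xc0; only the reg field (CR number) and
   the rm field (general register) are actually decoded. */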
8127 case 0x121: /* mov reg, drN */
8128 case 0x123: /* mov drN, reg */
8129 if (s->cpl != 0) {
8130 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8131 } else {
8132 modrm = ldub_code(s->pc++);
8133#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8134 if ((modrm & 0xc0) != 0xc0)
8135 goto illegal_op;
8136#endif
8137 rm = (modrm & 7) | REX_B(s);
8138 reg = ((modrm >> 3) & 7) | rex_r;
8139 if (CODE64(s))
8140 ot = OT_QUAD;
8141 else
8142 ot = OT_LONG;
8143 /* XXX: do it dynamically with CR4.DE bit */
8144 if (reg == 4 || reg == 5 || reg >= 8)
8145 goto illegal_op;
8146 if (b & 2) {
8147 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8148 gen_op_mov_TN_reg(ot, 0, rm);
8149 tcg_gen_helper_0_2(helper_movl_drN_T0,
8150 tcg_const_i32(reg), cpu_T[0]);
8151 gen_jmp_im(s->pc - s->cs_base);
8152 gen_eob(s);
8153 } else {
8154 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8155 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8156 gen_op_mov_reg_T0(ot, rm);
8157 }
8158 }
8159 break;
8160 case 0x106: /* clts */
8161 if (s->cpl != 0) {
8162 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8163 } else {
8164 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8165 tcg_gen_helper_0_0(helper_clts);
8166 /* abort block because static cpu state changed */
8167 gen_jmp_im(s->pc - s->cs_base);
8168 gen_eob(s);
8169 }
8170 break;
8171 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8172 case 0x1c3: /* MOVNTI reg, mem */
8173 if (!(s->cpuid_features & CPUID_SSE2))
8174 goto illegal_op;
8175 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8176 modrm = ldub_code(s->pc++);
8177 mod = (modrm >> 6) & 3;
8178 if (mod == 3)
8179 goto illegal_op;
8180 reg = ((modrm >> 3) & 7) | rex_r;
8181 /* generate a generic store */
8182 gen_ldst_modrm(s, modrm, ot, reg, 1);
8183 break;
8184 case 0x1ae:
8185 modrm = ldub_code(s->pc++);
8186 mod = (modrm >> 6) & 3;
8187 op = (modrm >> 3) & 7;
8188 switch(op) {
8189 case 0: /* fxsave */
8190 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8191 (s->flags & HF_EM_MASK))
8192 goto illegal_op;
8193 if (s->flags & HF_TS_MASK) {
8194 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8195 break;
8196 }
8197 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8198 if (s->cc_op != CC_OP_DYNAMIC)
8199 gen_op_set_cc_op(s->cc_op);
8200 gen_jmp_im(pc_start - s->cs_base);
8201 tcg_gen_helper_0_2(helper_fxsave,
8202 cpu_A0, tcg_const_i32((s->dflag == 2)));
8203 break;
8204 case 1: /* fxrstor */
8205 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8206 (s->flags & HF_EM_MASK))
8207 goto illegal_op;
8208 if (s->flags & HF_TS_MASK) {
8209 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8210 break;
8211 }
8212 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8213 if (s->cc_op != CC_OP_DYNAMIC)
8214 gen_op_set_cc_op(s->cc_op);
8215 gen_jmp_im(pc_start - s->cs_base);
8216 tcg_gen_helper_0_2(helper_fxrstor,
8217 cpu_A0, tcg_const_i32((s->dflag == 2)));
8218 break;
8219 case 2: /* ldmxcsr */
8220 case 3: /* stmxcsr */
8221 if (s->flags & HF_TS_MASK) {
8222 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8223 break;
8224 }
8225 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8226 mod == 3)
8227 goto illegal_op;
8228 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8229 if (op == 2) {
8230 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8231 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8232 } else {
8233 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8234 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8235 }
8236 break;
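/* What the ldmxcsr/stmxcsr case above boils down to, as a hedged
   plain-C sketch (hypothetical accessors, for illustration only):
       if (op == 2)                        // ldmxcsr m32
           env->mxcsr = load32(addr);
       else                                // stmxcsr m32
           store32(addr, env->mxcsr);
   i.e. a plain 32-bit move between memory and CPUX86State.mxcsr. */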
8237 case 5: /* lfence */
8238 case 6: /* mfence */
8239 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8240 goto illegal_op;
8241 break;
8242 case 7: /* sfence / clflush */
8243 if ((modrm & 0xc7) == 0xc0) {
8244 /* sfence */
8245 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8246 if (!(s->cpuid_features & CPUID_SSE))
8247 goto illegal_op;
8248 } else {
8249 /* clflush */
8250 if (!(s->cpuid_features & CPUID_CLFLUSH))
8251 goto illegal_op;
8252 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8253 }
8254 break;
8255 default:
8256 goto illegal_op;
8257 }
8258 break;
8259 case 0x10d: /* 3DNow! prefetch(w) */
8260 modrm = ldub_code(s->pc++);
8261 mod = (modrm >> 6) & 3;
8262 if (mod == 3)
8263 goto illegal_op;
8264 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8265 /* ignore for now */
8266 break;
8267 case 0x1aa: /* rsm */
8268 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8269 if (!(s->flags & HF_SMM_MASK))
8270 goto illegal_op;
8271 if (s->cc_op != CC_OP_DYNAMIC) {
8272 gen_op_set_cc_op(s->cc_op);
8273 s->cc_op = CC_OP_DYNAMIC;
8274 }
8275 gen_jmp_im(s->pc - s->cs_base);
8276 tcg_gen_helper_0_0(helper_rsm);
8277 gen_eob(s);
8278 break;
8279 case 0x1b8: /* SSE4.2 popcnt */
8280 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8281 PREFIX_REPZ)
8282 goto illegal_op;
8283 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8284 goto illegal_op;
8285
8286 modrm = ldub_code(s->pc++);
8287 reg = ((modrm >> 3) & 7) | rex_r; /* include REX.R so r8-r15 work as destinations */
8288
8289 if (s->prefix & PREFIX_DATA)
8290 ot = OT_WORD;
8291 else if (s->dflag != 2)
8292 ot = OT_LONG;
8293 else
8294 ot = OT_QUAD;
8295
8296 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8297 tcg_gen_helper_1_2(helper_popcnt,
8298 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8299 gen_op_mov_reg_T0(ot, reg);
8300
8301 s->cc_op = CC_OP_EFLAGS;
8302 break;
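/* A plain-C sketch (hypothetical, not the real helper_popcnt) of the
   population count computed for cpu_T[0], shown for 32-bit operands: */
#if 0
static int popcnt32_sketch(uint32_t v)
{
    int n = 0;
    while (v) {
        v &= v - 1;   /* clear the lowest set bit */
        n++;
    }
    return n;         /* number of 1 bits in v */
}
#endif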
8303 case 0x10e ... 0x10f:
8304 /* 3DNow! instructions, ignore prefixes */
8305 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA); /* fall through */
8306 case 0x110 ... 0x117:
8307 case 0x128 ... 0x12f:
8308 case 0x138 ... 0x13a:
8309 case 0x150 ... 0x177:
8310 case 0x17c ... 0x17f:
8311 case 0x1c2:
8312 case 0x1c4 ... 0x1c6:
8313 case 0x1d0 ... 0x1fe:
8314 gen_sse(s, b, pc_start, rex_r);
8315 break;
8316 default:
8317 goto illegal_op;
8318 }
8319 /* if a LOCK prefix was handled, release the lock now */
8320 if (s->prefix & PREFIX_LOCK)
8321 tcg_gen_helper_0_0(helper_unlock);
8322 return s->pc;
8323 illegal_op:
8324 if (s->prefix & PREFIX_LOCK)
8325 tcg_gen_helper_0_0(helper_unlock);
8326 /* XXX: ensure that no lock was generated */
8327 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8328 return s->pc;
8329}
8330
8331void optimize_flags_init(void)
8332{
8333#if TCG_TARGET_REG_BITS == 32
8334 assert(sizeof(CCTable) == (1 << 3));
8335#else
8336 assert(sizeof(CCTable) == (1 << 4));
8337#endif
8338 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8339 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8340 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8341 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8342 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8343 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8344 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8345 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8346 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8347
8348 /* register helpers */
8349
8350#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8351#include "helper.h"
8352}
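/* The #include above is an X-macro pass: with DEF_HELPER temporarily
   redefined to emit a tcg_register_helper() call, re-reading helper.h
   turns every helper declaration into a registration. For a
   hypothetical declaration
       DEF_HELPER(void, helper_example, (void))
   the line expands here to
       tcg_register_helper(helper_example, "helper_example"); */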
8353
8354/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8355 basic block 'tb'. If search_pc is TRUE, also generate PC
8356 information for each intermediate instruction. */
8357#ifndef VBOX
8358static inline void gen_intermediate_code_internal(CPUState *env,
8359#else /* VBOX */
8360DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8361#endif /* VBOX */
8362 TranslationBlock *tb,
8363 int search_pc)
8364{
8365 DisasContext dc1, *dc = &dc1;
8366 target_ulong pc_ptr;
8367 uint16_t *gen_opc_end;
8368 int j, lj, cflags;
8369 uint64_t flags;
8370 target_ulong pc_start;
8371 target_ulong cs_base;
8372 int num_insns;
8373 int max_insns;
8374
8375 /* generate intermediate code */
8376 pc_start = tb->pc;
8377 cs_base = tb->cs_base;
8378 flags = tb->flags;
8379 cflags = tb->cflags;
8380
8381 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8382 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8383 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8384 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8385 dc->f_st = 0;
8386 dc->vm86 = (flags >> VM_SHIFT) & 1;
8387#ifdef VBOX
8388 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8389 dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
8390#ifdef VBOX_WITH_CALL_RECORD
8391 if ( !(env->state & CPU_RAW_RING0)
8392 && (env->cr[0] & CR0_PG_MASK)
8393 && !(env->eflags & X86_EFL_IF)
8394 && dc->code32)
8395 dc->record_call = 1;
8396 else
8397 dc->record_call = 0;
8398#endif
8399#endif
8400 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8401 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8402 dc->tf = (flags >> TF_SHIFT) & 1;
8403 dc->singlestep_enabled = env->singlestep_enabled;
8404 dc->cc_op = CC_OP_DYNAMIC;
8405 dc->cs_base = cs_base;
8406 dc->tb = tb;
8407 dc->popl_esp_hack = 0;
8408 /* select memory access functions */
8409 dc->mem_index = 0;
8410 if (flags & HF_SOFTMMU_MASK) {
8411 if (dc->cpl == 3)
8412 dc->mem_index = 2 * 4;
8413 else
8414 dc->mem_index = 1 * 4;
8415 }
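/* Assumed encoding behind the constants above (sketch): the low two
   bits of an accessor index carry the operand size (byte/word/long/
   quad) and the upper bits select the MMU mode, so mem_index is a
   multiple of 4: 0 = raw access (no softmmu), 1 * 4 = kernel softmmu,
   2 * 4 = user (CPL 3) softmmu. It is later combined with a size as
   in OT_LONG + s->mem_index. */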
8416 dc->cpuid_features = env->cpuid_features;
8417 dc->cpuid_ext_features = env->cpuid_ext_features;
8418 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8419 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8420#ifdef TARGET_X86_64
8421 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8422 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8423#endif
8424 dc->flags = flags;
8425 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8426 (flags & HF_INHIBIT_IRQ_MASK)
8427#ifndef CONFIG_SOFTMMU
8428 || (flags & HF_SOFTMMU_MASK)
8429#endif
8430 );
8431#if 0
8432 /* check addseg logic */
8433 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8434 printf("ERROR addseg\n");
8435#endif
8436
8437 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8438 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8439 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8440 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8441
8442 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8443 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8444 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8445 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8446 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8447 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8448 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8449 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8450 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8451
8452 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8453
8454 dc->is_jmp = DISAS_NEXT;
8455 pc_ptr = pc_start;
8456 lj = -1;
8457 num_insns = 0;
8458 max_insns = tb->cflags & CF_COUNT_MASK;
8459 if (max_insns == 0)
8460 max_insns = CF_COUNT_MASK;
8461
8462 gen_icount_start();
8463 for(;;) {
8464 if (env->nb_breakpoints > 0) {
8465 for(j = 0; j < env->nb_breakpoints; j++) {
8466 if (env->breakpoints[j] == pc_ptr) {
8467 gen_debug(dc, pc_ptr - dc->cs_base);
8468 break;
8469 }
8470 }
8471 }
8472 if (search_pc) {
8473 j = gen_opc_ptr - gen_opc_buf;
8474 if (lj < j) {
8475 lj++;
8476 while (lj < j)
8477 gen_opc_instr_start[lj++] = 0;
8478 }
8479 gen_opc_pc[lj] = pc_ptr;
8480 gen_opc_cc_op[lj] = dc->cc_op;
8481 gen_opc_instr_start[lj] = 1;
8482 gen_opc_icount[lj] = num_insns;
8483 }
8484 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8485 gen_io_start();
8486
8487 pc_ptr = disas_insn(dc, pc_ptr);
8488 num_insns++;
8489 /* stop translation if indicated */
8490 if (dc->is_jmp)
8491 break;
8492#ifdef VBOX
8493#ifdef DEBUG
8494/*
8495 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8496 {
8497 //should never happen as the jump to the patch code terminates the translation block
8498 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8499 }
8500*/
8501#endif
8502 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8503 {
8504 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8505 gen_jmp_im(pc_ptr - dc->cs_base);
8506 gen_eob(dc);
8507 break;
8508 }
8509#endif /* VBOX */
8510
8511 /* if in single-step mode, we generate only one instruction and
8512 generate an exception */
8513 /* if IRQs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8514 the flag and abort the translation to give the IRQs a
8515 chance to happen */
8516 if (dc->tf || dc->singlestep_enabled ||
8517 (flags & HF_INHIBIT_IRQ_MASK)) {
8518 gen_jmp_im(pc_ptr - dc->cs_base);
8519 gen_eob(dc);
8520 break;
8521 }
8522 /* if the translation has grown too long, stop generation too */
8523 if (gen_opc_ptr >= gen_opc_end ||
8524 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8525 num_insns >= max_insns) {
8526 gen_jmp_im(pc_ptr - dc->cs_base);
8527 gen_eob(dc);
8528 break;
8529 }
8530 }
8531 if (tb->cflags & CF_LAST_IO)
8532 gen_io_end();
8533 gen_icount_end(tb, num_insns);
8534 *gen_opc_ptr = INDEX_op_end;
8535 /* make sure we fill in the last values */
8536 if (search_pc) {
8537 j = gen_opc_ptr - gen_opc_buf;
8538 lj++;
8539 while (lj <= j)
8540 gen_opc_instr_start[lj++] = 0;
8541 }
8542
8543#ifdef DEBUG_DISAS
8544 if (loglevel & CPU_LOG_TB_CPU) {
8545 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8546 }
8547 if (loglevel & CPU_LOG_TB_IN_ASM) {
8548 int disas_flags;
8549 fprintf(logfile, "----------------\n");
8550 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8551#ifdef TARGET_X86_64
8552 if (dc->code64)
8553 disas_flags = 2;
8554 else
8555#endif
8556 disas_flags = !dc->code32;
8557 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8558 fprintf(logfile, "\n");
8559 }
8560#endif
8561
8562 if (!search_pc) {
8563 tb->size = pc_ptr - pc_start;
8564 tb->icount = num_insns;
8565 }
8566}
8567
8568void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8569{
8570 gen_intermediate_code_internal(env, tb, 0);
8571}
8572
8573void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8574{
8575 gen_intermediate_code_internal(env, tb, 1);
8576}
8577
8578void gen_pc_load(CPUState *env, TranslationBlock *tb,
8579 unsigned long searched_pc, int pc_pos, void *puc)
8580{
8581 int cc_op;
8582#ifdef DEBUG_DISAS
8583 if (loglevel & CPU_LOG_TB_OP) {
8584 int i;
8585 fprintf(logfile, "RESTORE:\n");
8586 for(i = 0;i <= pc_pos; i++) {
8587 if (gen_opc_instr_start[i]) {
8588 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8589 }
8590 }
8591 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8592 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8593 (uint32_t)tb->cs_base);
8594 }
8595#endif
8596 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8597 cc_op = gen_opc_cc_op[pc_pos];
8598 if (cc_op != CC_OP_DYNAMIC)
8599 env->cc_op = cc_op;
8600}
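/* How the pieces above fit together: when a fault is taken inside a
   generated block, the block is retranslated via
   gen_intermediate_code_pc() with search_pc set, which refills
   gen_opc_pc[] and gen_opc_cc_op[] for every guest instruction;
   gen_pc_load() then uses the opcode position pc_pos of the faulting
   host PC to restore env->eip and, if statically known, env->cc_op. */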