VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 4672

Last change on this file since 4672 was 3952, checked in by vboxsync, 17 years ago

Incorporated aam division by zero security fix.

  • Property svn:eol-style set to native
File size: 200.4 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#ifndef VBOX
26#include <signal.h>
27#include <assert.h>
28#endif /* !VBOX */
29
30#include "cpu.h"
31#include "exec-all.h"
32#include "disas.h"
33
34/* XXX: move that elsewhere */
35static uint16_t *gen_opc_ptr;
36static uint32_t *gen_opparam_ptr;
37
38#define PREFIX_REPZ 0x01
39#define PREFIX_REPNZ 0x02
40#define PREFIX_LOCK 0x04
41#define PREFIX_DATA 0x08
42#define PREFIX_ADR 0x10
43
44#ifdef TARGET_X86_64
45#define X86_64_ONLY(x) x
46#define X86_64_DEF(x...) x
47#define CODE64(s) ((s)->code64)
48#define REX_X(s) ((s)->rex_x)
49#define REX_B(s) ((s)->rex_b)
50/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
51#if 1
52#define BUGGY_64(x) NULL
53#endif
54#else
55#define X86_64_ONLY(x) NULL
56#define X86_64_DEF(x...)
57#define CODE64(s) 0
58#define REX_X(s) 0
59#define REX_B(s) 0
60#endif
61
62#ifdef TARGET_X86_64
63static int x86_64_hregs;
64#endif
65
66#ifdef USE_DIRECT_JUMP
67#define TBPARAM(x)
68#else
69#define TBPARAM(x) (long)(x)
70#endif
71
72#ifdef VBOX
73/* Special/override code readers to hide patched code. */
74
75uint8_t ldub_code_raw(target_ulong pc)
76{
77 uint8_t b;
78
79 if (!remR3GetOpcode(cpu_single_env, pc, &b))
80 b = ldub_code(pc);
81 return b;
82}
83#define ldub_code(a) ldub_code_raw(a)
84
85uint16_t lduw_code_raw(target_ulong pc)
86{
87 return (ldub_code(pc+1) << 8) | ldub_code(pc);
88}
89#define lduw_code(a) lduw_code_raw(a)
90
91
92uint32_t ldl_code_raw(target_ulong pc)
93{
94 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
95}
96#define ldl_code(a) ldl_code_raw(a)
97
98#endif /* VBOX */
99
100
/* Translator state, live while a single translation block (TB) is
   being generated.  Fields above the "current block context" marker
   are reset per instruction; the rest describe the whole block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment register index for a prefix override, -1 if no override */
    int prefix;   /* PREFIX_* bits collected for the current instruction */
    int aflag, dflag; /* address/operand size: 0 = 16 bit, 1 = 32 bit, 2 = 64 bit */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base register extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (CC_OP_*); CC_OP_DYNAMIC when flags live in env */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (raw/kernel/user table stride) */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* guest CPUID feature bits */
    int cpuid_ext_features; /* guest CPUID extended feature bits */
} DisasContext;
139
140static void gen_eob(DisasContext *s);
141static void gen_jmp(DisasContext *s, target_ulong eip);
142static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
143
/* i386 arith/logic operations — order matches the /r field encoding of
   the 0x80..0x83 immediate-group opcodes. */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
155
/* i386 shift ops — order matches the /r field encoding of the
   0xC0/0xC1/0xD0..0xD3 shift-group opcodes. */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented alias encoding (/6) that behaves as SHL */
    OP_SAR = 7,
};
167
/* Micro-op indices: expand every DEF() entry in opc.h into an
   INDEX_op_* constant; NB_OPS is the total count. */
enum {
#define DEF(s, n, copy_size) INDEX_op_ ## s,
#include "opc.h"
#undef DEF
    NB_OPS,
};
174
175#include "gen-op.h"
176
/* operand size — used as the first index of most codegen tables below */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
184
enum {
    /* I386 int registers — values mirror the hardware reg encoding */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
200
201#ifdef TARGET_X86_64
202
203#define NB_OP_SIZES 4
204
205#define DEF_REGS(prefix, suffix) \
206 prefix ## EAX ## suffix,\
207 prefix ## ECX ## suffix,\
208 prefix ## EDX ## suffix,\
209 prefix ## EBX ## suffix,\
210 prefix ## ESP ## suffix,\
211 prefix ## EBP ## suffix,\
212 prefix ## ESI ## suffix,\
213 prefix ## EDI ## suffix,\
214 prefix ## R8 ## suffix,\
215 prefix ## R9 ## suffix,\
216 prefix ## R10 ## suffix,\
217 prefix ## R11 ## suffix,\
218 prefix ## R12 ## suffix,\
219 prefix ## R13 ## suffix,\
220 prefix ## R14 ## suffix,\
221 prefix ## R15 ## suffix,
222
223#define DEF_BREGS(prefixb, prefixh, suffix) \
224 \
225static void prefixb ## ESP ## suffix ## _wrapper(void) \
226{ \
227 if (x86_64_hregs) \
228 prefixb ## ESP ## suffix (); \
229 else \
230 prefixh ## EAX ## suffix (); \
231} \
232 \
233static void prefixb ## EBP ## suffix ## _wrapper(void) \
234{ \
235 if (x86_64_hregs) \
236 prefixb ## EBP ## suffix (); \
237 else \
238 prefixh ## ECX ## suffix (); \
239} \
240 \
241static void prefixb ## ESI ## suffix ## _wrapper(void) \
242{ \
243 if (x86_64_hregs) \
244 prefixb ## ESI ## suffix (); \
245 else \
246 prefixh ## EDX ## suffix (); \
247} \
248 \
249static void prefixb ## EDI ## suffix ## _wrapper(void) \
250{ \
251 if (x86_64_hregs) \
252 prefixb ## EDI ## suffix (); \
253 else \
254 prefixh ## EBX ## suffix (); \
255}
256
257DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
258DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
259DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
260DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
261
262#else /* !TARGET_X86_64 */
263
264#define NB_OP_SIZES 3
265
266#define DEF_REGS(prefix, suffix) \
267 prefix ## EAX ## suffix,\
268 prefix ## ECX ## suffix,\
269 prefix ## EDX ## suffix,\
270 prefix ## EBX ## suffix,\
271 prefix ## ESP ## suffix,\
272 prefix ## EBP ## suffix,\
273 prefix ## ESI ## suffix,\
274 prefix ## EDI ## suffix,
275
276#endif /* !TARGET_X86_64 */
277
278static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
279 [OT_BYTE] = {
280 gen_op_movb_EAX_T0,
281 gen_op_movb_ECX_T0,
282 gen_op_movb_EDX_T0,
283 gen_op_movb_EBX_T0,
284#ifdef TARGET_X86_64
285 gen_op_movb_ESP_T0_wrapper,
286 gen_op_movb_EBP_T0_wrapper,
287 gen_op_movb_ESI_T0_wrapper,
288 gen_op_movb_EDI_T0_wrapper,
289 gen_op_movb_R8_T0,
290 gen_op_movb_R9_T0,
291 gen_op_movb_R10_T0,
292 gen_op_movb_R11_T0,
293 gen_op_movb_R12_T0,
294 gen_op_movb_R13_T0,
295 gen_op_movb_R14_T0,
296 gen_op_movb_R15_T0,
297#else
298 gen_op_movh_EAX_T0,
299 gen_op_movh_ECX_T0,
300 gen_op_movh_EDX_T0,
301 gen_op_movh_EBX_T0,
302#endif
303 },
304 [OT_WORD] = {
305 DEF_REGS(gen_op_movw_, _T0)
306 },
307 [OT_LONG] = {
308 DEF_REGS(gen_op_movl_, _T0)
309 },
310#ifdef TARGET_X86_64
311 [OT_QUAD] = {
312 DEF_REGS(gen_op_movq_, _T0)
313 },
314#endif
315};
316
317static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
318 [OT_BYTE] = {
319 gen_op_movb_EAX_T1,
320 gen_op_movb_ECX_T1,
321 gen_op_movb_EDX_T1,
322 gen_op_movb_EBX_T1,
323#ifdef TARGET_X86_64
324 gen_op_movb_ESP_T1_wrapper,
325 gen_op_movb_EBP_T1_wrapper,
326 gen_op_movb_ESI_T1_wrapper,
327 gen_op_movb_EDI_T1_wrapper,
328 gen_op_movb_R8_T1,
329 gen_op_movb_R9_T1,
330 gen_op_movb_R10_T1,
331 gen_op_movb_R11_T1,
332 gen_op_movb_R12_T1,
333 gen_op_movb_R13_T1,
334 gen_op_movb_R14_T1,
335 gen_op_movb_R15_T1,
336#else
337 gen_op_movh_EAX_T1,
338 gen_op_movh_ECX_T1,
339 gen_op_movh_EDX_T1,
340 gen_op_movh_EBX_T1,
341#endif
342 },
343 [OT_WORD] = {
344 DEF_REGS(gen_op_movw_, _T1)
345 },
346 [OT_LONG] = {
347 DEF_REGS(gen_op_movl_, _T1)
348 },
349#ifdef TARGET_X86_64
350 [OT_QUAD] = {
351 DEF_REGS(gen_op_movq_, _T1)
352 },
353#endif
354};
355
356static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
357 [0] = {
358 DEF_REGS(gen_op_movw_, _A0)
359 },
360 [1] = {
361 DEF_REGS(gen_op_movl_, _A0)
362 },
363#ifdef TARGET_X86_64
364 [2] = {
365 DEF_REGS(gen_op_movq_, _A0)
366 },
367#endif
368};
369
370static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
371{
372 [OT_BYTE] = {
373 {
374 gen_op_movl_T0_EAX,
375 gen_op_movl_T0_ECX,
376 gen_op_movl_T0_EDX,
377 gen_op_movl_T0_EBX,
378#ifdef TARGET_X86_64
379 gen_op_movl_T0_ESP_wrapper,
380 gen_op_movl_T0_EBP_wrapper,
381 gen_op_movl_T0_ESI_wrapper,
382 gen_op_movl_T0_EDI_wrapper,
383 gen_op_movl_T0_R8,
384 gen_op_movl_T0_R9,
385 gen_op_movl_T0_R10,
386 gen_op_movl_T0_R11,
387 gen_op_movl_T0_R12,
388 gen_op_movl_T0_R13,
389 gen_op_movl_T0_R14,
390 gen_op_movl_T0_R15,
391#else
392 gen_op_movh_T0_EAX,
393 gen_op_movh_T0_ECX,
394 gen_op_movh_T0_EDX,
395 gen_op_movh_T0_EBX,
396#endif
397 },
398 {
399 gen_op_movl_T1_EAX,
400 gen_op_movl_T1_ECX,
401 gen_op_movl_T1_EDX,
402 gen_op_movl_T1_EBX,
403#ifdef TARGET_X86_64
404 gen_op_movl_T1_ESP_wrapper,
405 gen_op_movl_T1_EBP_wrapper,
406 gen_op_movl_T1_ESI_wrapper,
407 gen_op_movl_T1_EDI_wrapper,
408 gen_op_movl_T1_R8,
409 gen_op_movl_T1_R9,
410 gen_op_movl_T1_R10,
411 gen_op_movl_T1_R11,
412 gen_op_movl_T1_R12,
413 gen_op_movl_T1_R13,
414 gen_op_movl_T1_R14,
415 gen_op_movl_T1_R15,
416#else
417 gen_op_movh_T1_EAX,
418 gen_op_movh_T1_ECX,
419 gen_op_movh_T1_EDX,
420 gen_op_movh_T1_EBX,
421#endif
422 },
423 },
424 [OT_WORD] = {
425 {
426 DEF_REGS(gen_op_movl_T0_, )
427 },
428 {
429 DEF_REGS(gen_op_movl_T1_, )
430 },
431 },
432 [OT_LONG] = {
433 {
434 DEF_REGS(gen_op_movl_T0_, )
435 },
436 {
437 DEF_REGS(gen_op_movl_T1_, )
438 },
439 },
440#ifdef TARGET_X86_64
441 [OT_QUAD] = {
442 {
443 DEF_REGS(gen_op_movl_T0_, )
444 },
445 {
446 DEF_REGS(gen_op_movl_T1_, )
447 },
448 },
449#endif
450};
451
452static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
453 DEF_REGS(gen_op_movl_A0_, )
454};
455
456static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
457 [0] = {
458 DEF_REGS(gen_op_addl_A0_, )
459 },
460 [1] = {
461 DEF_REGS(gen_op_addl_A0_, _s1)
462 },
463 [2] = {
464 DEF_REGS(gen_op_addl_A0_, _s2)
465 },
466 [3] = {
467 DEF_REGS(gen_op_addl_A0_, _s3)
468 },
469};
470
471#ifdef TARGET_X86_64
472static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
473 DEF_REGS(gen_op_movq_A0_, )
474};
475
476static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
477 [0] = {
478 DEF_REGS(gen_op_addq_A0_, )
479 },
480 [1] = {
481 DEF_REGS(gen_op_addq_A0_, _s1)
482 },
483 [2] = {
484 DEF_REGS(gen_op_addq_A0_, _s2)
485 },
486 [3] = {
487 DEF_REGS(gen_op_addq_A0_, _s3)
488 },
489};
490#endif
491
492static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
493 [0] = {
494 DEF_REGS(gen_op_cmovw_, _T1_T0)
495 },
496 [1] = {
497 DEF_REGS(gen_op_cmovl_, _T1_T0)
498 },
499#ifdef TARGET_X86_64
500 [2] = {
501 DEF_REGS(gen_op_cmovq_, _T1_T0)
502 },
503#endif
504};
505
/* Logic-op dispatch, indexed by OP_*.  Only OR/AND/XOR have entries;
   the other slots are NULL because gen_op() handles ADD/SUB/ADC/SBB/CMP
   through dedicated paths and never indexes them here. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};
516
517#define DEF_ARITHC(SUFFIX)\
518 {\
519 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
520 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
521 },\
522 {\
523 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
524 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
525 },\
526 {\
527 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
528 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
529 },\
530 {\
531 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
532 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
533 },
534
535static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
536 DEF_ARITHC( )
537};
538
539static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
540 DEF_ARITHC(_raw)
541#ifndef CONFIG_USER_ONLY
542 DEF_ARITHC(_kernel)
543 DEF_ARITHC(_user)
544#endif
545};
546
/* CC_OP_* class for each byte-width arith op, indexed by OP_*; add the
   operand size to get the width-specific CC_OP constant. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,   /* ADC tracked like ADD */
    CC_OP_SUBB,   /* SBB tracked like SUB */
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,   /* CMP tracked like SUB */
};
557
558#define DEF_CMPXCHG(SUFFIX)\
559 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
560 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
561 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
562 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
563
564static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
565 DEF_CMPXCHG( )
566};
567
568static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
569 DEF_CMPXCHG(_raw)
570#ifndef CONFIG_USER_ONLY
571 DEF_CMPXCHG(_kernel)
572 DEF_CMPXCHG(_user)
573#endif
574};
575
576#define DEF_SHIFT(SUFFIX)\
577 {\
578 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
579 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
580 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
581 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
582 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
583 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
584 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
585 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
586 },\
587 {\
588 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
589 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
590 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
591 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
592 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
593 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
594 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
595 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
596 },\
597 {\
598 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
599 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
600 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
601 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
602 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
603 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
604 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
605 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
606 },\
607 {\
608 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
609 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
610 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
611 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
612 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
613 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
614 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
615 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
616 },
617
618static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
619 DEF_SHIFT( )
620};
621
622static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
623 DEF_SHIFT(_raw)
624#ifndef CONFIG_USER_ONLY
625 DEF_SHIFT(_kernel)
626 DEF_SHIFT(_user)
627#endif
628};
629
630#define DEF_SHIFTD(SUFFIX, op)\
631 {\
632 NULL,\
633 NULL,\
634 },\
635 {\
636 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
637 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
638 },\
639 {\
640 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
641 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
642 },\
643 {\
644X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
645 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
646 },
647
648static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
649 DEF_SHIFTD(, im)
650};
651
652static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
653 DEF_SHIFTD(, ECX)
654};
655
656static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
657 DEF_SHIFTD(_raw, im)
658#ifndef CONFIG_USER_ONLY
659 DEF_SHIFTD(_kernel, im)
660 DEF_SHIFTD(_user, im)
661#endif
662};
663
664static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
665 DEF_SHIFTD(_raw, ECX)
666#ifndef CONFIG_USER_ONLY
667 DEF_SHIFTD(_kernel, ECX)
668 DEF_SHIFTD(_user, ECX)
669#endif
670};
671
672static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
673 [0] = {
674 gen_op_btw_T0_T1_cc,
675 gen_op_btsw_T0_T1_cc,
676 gen_op_btrw_T0_T1_cc,
677 gen_op_btcw_T0_T1_cc,
678 },
679 [1] = {
680 gen_op_btl_T0_T1_cc,
681 gen_op_btsl_T0_T1_cc,
682 gen_op_btrl_T0_T1_cc,
683 gen_op_btcl_T0_T1_cc,
684 },
685#ifdef TARGET_X86_64
686 [2] = {
687 gen_op_btq_T0_T1_cc,
688 gen_op_btsq_T0_T1_cc,
689 gen_op_btrq_T0_T1_cc,
690 gen_op_btcq_T0_T1_cc,
691 },
692#endif
693};
694
/* Add the bit-offset in T1 (scaled to bytes) to A0, for memory-operand
   BT/BTS/BTR/BTC; indexed by size - OT_WORD. */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};
700
701static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
702 [0] = {
703 gen_op_bsfw_T0_cc,
704 gen_op_bsrw_T0_cc,
705 },
706 [1] = {
707 gen_op_bsfl_T0_cc,
708 gen_op_bsrl_T0_cc,
709 },
710#ifdef TARGET_X86_64
711 [2] = {
712 gen_op_bsfq_T0_cc,
713 gen_op_bsrq_T0_cc,
714 },
715#endif
716};
717
718static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
719 gen_op_ldsb_raw_T0_A0,
720 gen_op_ldsw_raw_T0_A0,
721 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
722 NULL,
723#ifndef CONFIG_USER_ONLY
724 gen_op_ldsb_kernel_T0_A0,
725 gen_op_ldsw_kernel_T0_A0,
726 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
727 NULL,
728
729 gen_op_ldsb_user_T0_A0,
730 gen_op_ldsw_user_T0_A0,
731 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
732 NULL,
733#endif
734};
735
736static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
737 gen_op_ldub_raw_T0_A0,
738 gen_op_lduw_raw_T0_A0,
739 NULL,
740 NULL,
741
742#ifndef CONFIG_USER_ONLY
743 gen_op_ldub_kernel_T0_A0,
744 gen_op_lduw_kernel_T0_A0,
745 NULL,
746 NULL,
747
748 gen_op_ldub_user_T0_A0,
749 gen_op_lduw_user_T0_A0,
750 NULL,
751 NULL,
752#endif
753};
754
755/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
756static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
757 gen_op_ldub_raw_T0_A0,
758 gen_op_lduw_raw_T0_A0,
759 gen_op_ldl_raw_T0_A0,
760 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
761
762#ifndef CONFIG_USER_ONLY
763 gen_op_ldub_kernel_T0_A0,
764 gen_op_lduw_kernel_T0_A0,
765 gen_op_ldl_kernel_T0_A0,
766 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
767
768 gen_op_ldub_user_T0_A0,
769 gen_op_lduw_user_T0_A0,
770 gen_op_ldl_user_T0_A0,
771 X86_64_ONLY(gen_op_ldq_user_T0_A0),
772#endif
773};
774
775static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
776 gen_op_ldub_raw_T1_A0,
777 gen_op_lduw_raw_T1_A0,
778 gen_op_ldl_raw_T1_A0,
779 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
780
781#ifndef CONFIG_USER_ONLY
782 gen_op_ldub_kernel_T1_A0,
783 gen_op_lduw_kernel_T1_A0,
784 gen_op_ldl_kernel_T1_A0,
785 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
786
787 gen_op_ldub_user_T1_A0,
788 gen_op_lduw_user_T1_A0,
789 gen_op_ldl_user_T1_A0,
790 X86_64_ONLY(gen_op_ldq_user_T1_A0),
791#endif
792};
793
794static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
795 gen_op_stb_raw_T0_A0,
796 gen_op_stw_raw_T0_A0,
797 gen_op_stl_raw_T0_A0,
798 X86_64_ONLY(gen_op_stq_raw_T0_A0),
799
800#ifndef CONFIG_USER_ONLY
801 gen_op_stb_kernel_T0_A0,
802 gen_op_stw_kernel_T0_A0,
803 gen_op_stl_kernel_T0_A0,
804 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
805
806 gen_op_stb_user_T0_A0,
807 gen_op_stw_user_T0_A0,
808 gen_op_stl_user_T0_A0,
809 X86_64_ONLY(gen_op_stq_user_T0_A0),
810#endif
811};
812
813static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
814 NULL,
815 gen_op_stw_raw_T1_A0,
816 gen_op_stl_raw_T1_A0,
817 X86_64_ONLY(gen_op_stq_raw_T1_A0),
818
819#ifndef CONFIG_USER_ONLY
820 NULL,
821 gen_op_stw_kernel_T1_A0,
822 gen_op_stl_kernel_T1_A0,
823 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
824
825 NULL,
826 gen_op_stw_user_T1_A0,
827 gen_op_stl_user_T1_A0,
828 X86_64_ONLY(gen_op_stq_user_T1_A0),
829#endif
830};
831
832#ifdef VBOX
/* Emit a micro-op that polls for pending external (VMM/host) events,
   so translated code re-enters the run loop in a timely fashion.
   Note: declared with (void) — an empty parameter list () would be an
   old-style unprototyped declarator in C89/C99. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
837
/* Emit a store of the immediate pc into EIP/RIP, picking the cheapest
   micro-op variant that can represent the constant. */
static inline void gen_update_eip(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* fits sign-extended in 32 bits */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
852
853#endif /* VBOX */
854
/* Emit a store of the immediate pc into EIP/RIP.  Unlike gen_update_eip,
   the VBox build first emits an external-event poll, since this is
   called at control-flow boundaries. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* fits sign-extended in 32 bits */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
872
/* Emit code computing the string-source address into A0:
   A0 = seg_base + (R)ESI, honoring segment overrides and the current
   address size (64/32/16 bit per s->aflag). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment base only applies with an override */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;   /* non-flat segments: DS is the implicit source seg */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();   /* 16-bit wrap of SI */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
907
/* Emit code computing the string-destination address into A0:
   A0 = ES base + (R)EDI.  The destination segment of string ops is
   always ES and cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: flat, no segment base */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();   /* 16-bit wrap of DI */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
928
/* Load into T0 the per-element index step for string ops, indexed by
   operand size; the emitted op presumably negates it when EFLAGS.DF is
   set (standard x86 string semantics — confirm in the op definitions). */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};
935
/* (R)ECX loop-control helpers, all indexed by address size s->aflag
   (0 = 16-bit CX, 1 = 32-bit ECX, 2 = 64-bit RCX). */

/* jump to label if (R)E?CX != 0 */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

/* jump to label if (R)E?CX == 0 */
static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* decrement (R)E?CX */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};
953
/* Conditional exit for REPZ/REPNZ SCAS/CMPS: [nz][ot] — row 0 jumps
   while ZF says "not equal" (REPE termination), row 1 while "equal"
   (REPNE termination), based on the last sub result. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
968
/* Port I/O micro-op dispatch, indexed by operand size (byte/word/long;
   there is no 64-bit port I/O). */

/* IN: read port DX into T0 */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

/* OUT: write T0 to port DX */
static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

/* IN with port number in T0, result in T1 */
static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

/* OUT with port number in T0, value in T1 */
static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};
992
/* TSS I/O-permission-bitmap checks, indexed by operand size; one
   variant takes the port from T0, the other from DX. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
1004
1005static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1006{
1007 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1008 if (s->cc_op != CC_OP_DYNAMIC)
1009 gen_op_set_cc_op(s->cc_op);
1010 gen_jmp_im(cur_eip);
1011 if (use_dx)
1012 gen_check_io_DX[ot]();
1013 else
1014 gen_check_io_T0[ot]();
1015 }
1016}
1017
/* Emit one MOVS iteration: copy one element from [ESI] to [ES:EDI],
   then advance both index registers by the DF-directed element size. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();   /* T0 = signed step for this size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1039
1040static inline void gen_update_cc_op(DisasContext *s)
1041{
1042 if (s->cc_op != CC_OP_DYNAMIC) {
1043 gen_op_set_cc_op(s->cc_op);
1044 s->cc_op = CC_OP_DYNAMIC;
1045 }
1046}
1047
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the "ECX == 0 -> skip to next_eip" header of a REP string op.
   Returns label l2, placed just before the exit jump, so callers can
   branch back to it to terminate the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);   /* ECX != 0: fall into the body at l1 */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);     /* ECX == 0: jump past the string insn */
    gen_set_label(l1);
    return l2;
}
1062
/* Emit one STOS iteration: store AL/AX/EAX/RAX to [ES:EDI], then
   advance EDI by the DF-directed element size. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();   /* T0 = EAX (store op uses low ot bytes) */
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1080
/* Emit one LODS iteration: load [ESI] into AL/AX/EAX/RAX, then advance
   ESI by the DF-directed element size. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1098
/* Emit one SCAS iteration: compare AL/AX/EAX/RAX against [ES:EDI]
   (flags only), then advance EDI by the DF-directed element size. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();   /* T0 = EAX; compare uses low ot bytes */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1117
/* Emit one CMPS iteration: compare [ESI] against [ES:EDI] (flags only),
   then advance both index registers by the DF-directed element size. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1140
/* Emit one INS iteration: read port DX into [ES:EDI], then advance EDI
   by the DF-directed element size. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    /* dummy store of 0 first — presumably to take any write fault
       before the port is actually read; confirm against the fault
       semantics expected by callers */
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1160
/* Emit one OUTS iteration: write [ESI] to port DX, then advance ESI by
   the DF-directed element size. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1178
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Define gen_repz_<op>: emit a REP-prefixed string op as
   "if ECX==0 goto next; body; ECX--; goto cur" — i.e. one iteration per
   executed TB, looping by re-dispatching to cur_eip. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1196
/* Define gen_repz_<op> for SCAS/CMPS: like GEN_REPZ but additionally
   terminates the loop on the ZF condition (nz selects REPE vs REPNE
   semantics via gen_op_string_jnz_sub). */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1214
1215GEN_REPZ(movs)
1216GEN_REPZ(stos)
1217GEN_REPZ(lods)
1218GEN_REPZ(ins)
1219GEN_REPZ(outs)
1220GEN_REPZ2(scas)
1221GEN_REPZ2(cmps)
1222
1223enum {
1224 JCC_O,
1225 JCC_B,
1226 JCC_Z,
1227 JCC_BE,
1228 JCC_S,
1229 JCC_P,
1230 JCC_L,
1231 JCC_LE,
1232};
1233
1234static GenOpFunc1 *gen_jcc_sub[4][8] = {
1235 [OT_BYTE] = {
1236 NULL,
1237 gen_op_jb_subb,
1238 gen_op_jz_subb,
1239 gen_op_jbe_subb,
1240 gen_op_js_subb,
1241 NULL,
1242 gen_op_jl_subb,
1243 gen_op_jle_subb,
1244 },
1245 [OT_WORD] = {
1246 NULL,
1247 gen_op_jb_subw,
1248 gen_op_jz_subw,
1249 gen_op_jbe_subw,
1250 gen_op_js_subw,
1251 NULL,
1252 gen_op_jl_subw,
1253 gen_op_jle_subw,
1254 },
1255 [OT_LONG] = {
1256 NULL,
1257 gen_op_jb_subl,
1258 gen_op_jz_subl,
1259 gen_op_jbe_subl,
1260 gen_op_js_subl,
1261 NULL,
1262 gen_op_jl_subl,
1263 gen_op_jle_subl,
1264 },
1265#ifdef TARGET_X86_64
1266 [OT_QUAD] = {
1267 NULL,
1268 BUGGY_64(gen_op_jb_subq),
1269 gen_op_jz_subq,
1270 BUGGY_64(gen_op_jbe_subq),
1271 gen_op_js_subq,
1272 NULL,
1273 BUGGY_64(gen_op_jl_subq),
1274 BUGGY_64(gen_op_jle_subq),
1275 },
1276#endif
1277};
/* LOOPNZ/LOOPZ/LOOP-family conditional jumps, indexed by
   [address size][opcode low bits]; 4th column unused (NULL). */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1297
/* Generic SETcc fallback computing the condition from full EFLAGS,
   indexed by JCC_* condition code; used when no fast sub-based
   variant applies. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1308
1309static GenOpFunc *gen_setcc_sub[4][8] = {
1310 [OT_BYTE] = {
1311 NULL,
1312 gen_op_setb_T0_subb,
1313 gen_op_setz_T0_subb,
1314 gen_op_setbe_T0_subb,
1315 gen_op_sets_T0_subb,
1316 NULL,
1317 gen_op_setl_T0_subb,
1318 gen_op_setle_T0_subb,
1319 },
1320 [OT_WORD] = {
1321 NULL,
1322 gen_op_setb_T0_subw,
1323 gen_op_setz_T0_subw,
1324 gen_op_setbe_T0_subw,
1325 gen_op_sets_T0_subw,
1326 NULL,
1327 gen_op_setl_T0_subw,
1328 gen_op_setle_T0_subw,
1329 },
1330 [OT_LONG] = {
1331 NULL,
1332 gen_op_setb_T0_subl,
1333 gen_op_setz_T0_subl,
1334 gen_op_setbe_T0_subl,
1335 gen_op_sets_T0_subl,
1336 NULL,
1337 gen_op_setl_T0_subl,
1338 gen_op_setle_T0_subl,
1339 },
1340#ifdef TARGET_X86_64
1341 [OT_QUAD] = {
1342 NULL,
1343 gen_op_setb_T0_subq,
1344 gen_op_setz_T0_subq,
1345 gen_op_setbe_T0_subq,
1346 gen_op_sets_T0_subq,
1347 NULL,
1348 gen_op_setl_T0_subq,
1349 gen_op_setle_T0_subq,
1350 },
1351#endif
1352};
1353
/* x87 arithmetic ST0 = ST0 op FT0, indexed by the /r field of the
   D8/DC opcode group; both COM slots share the compare op. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1364
/* NOTE the exception in "r" op ordering */
/* x87 arithmetic ST(i) = ST(i) op ST0: the sub/div slots are swapped
   with their reversed forms relative to the table above, matching the
   hardware's encoding of the STN destination forms. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1376
/* Emit code for a two-operand ALU instruction (T1 already holds the source).
   if d == OR_TMP0, it means memory operand (address in A0); otherwise d is
   the destination register index.  Updates s1->cc_op to describe how the
   flags can be recomputed lazily. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* adc/sbb consume the current carry, so the flags must be
           materialized first; afterwards they are fully dynamic */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* cmp writes no destination, only the flag sources */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
    the_end: ;
}
1436
/* Emit code for INC (c > 0) or DEC (c <= 0).
   if d == OR_TMP0, it means memory operand (address in A0).
   CF is preserved by inc/dec, so the previous flags are materialized
   before the operation and merged by gen_op_update_inc_cc. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    gen_op_update_inc_cc();
}
1459
/* Emit code for a shift/rotate with a variable count.
   d is the destination (OR_TMP0 = memory at A0); s is the count register
   (OR_TMP1 means T1 already holds the count). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1480
/* Emit code for a shift/rotate with an immediate count c: just load the
   count into T1 and reuse the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1487
/* Decode the memory addressing form of a modrm byte (mod != 3) and emit
   code computing the effective address into A0, including any segment
   base.  Consumes the SIB byte and displacement from the code stream
   (advances s->pc).  *reg_ptr/*offset_ptr always return OR_A0/0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* rm == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* mod==0, base==5: no base, disp32 only (RIP-relative
                   in 64-bit mode when there is no SIB byte) */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addresses default to SS, others to DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing: fixed base/index register combinations */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* mod==0, rm==6: disp16 only */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit addresses wrap within the segment */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1677
1678static void gen_nop_modrm(DisasContext *s, int modrm)
1679{
1680 int mod, rm, base, code;
1681
1682 mod = (modrm >> 6) & 3;
1683 if (mod == 3)
1684 return;
1685 rm = modrm & 7;
1686
1687 if (s->aflag) {
1688
1689 base = rm;
1690
1691 if (base == 4) {
1692 code = ldub_code(s->pc++);
1693 base = (code & 7);
1694 }
1695
1696 switch (mod) {
1697 case 0:
1698 if (base == 5) {
1699 s->pc += 4;
1700 }
1701 break;
1702 case 1:
1703 s->pc++;
1704 break;
1705 default:
1706 case 2:
1707 s->pc += 4;
1708 break;
1709 }
1710 } else {
1711 switch (mod) {
1712 case 0:
1713 if (rm == 6) {
1714 s->pc += 2;
1715 }
1716 break;
1717 case 1:
1718 s->pc++;
1719 break;
1720 default:
1721 case 2:
1722 s->pc += 2;
1723 break;
1724 }
1725 }
1726}
1727
1728/* used for LEA and MOV AX, mem */
1729static void gen_add_A0_ds_seg(DisasContext *s)
1730{
1731 int override, must_add_seg;
1732 must_add_seg = s->addseg;
1733 override = R_DS;
1734 if (s->override >= 0) {
1735 override = s->override;
1736 must_add_seg = 1;
1737 } else {
1738 override = R_DS;
1739 }
1740 if (must_add_seg) {
1741#ifdef TARGET_X86_64
1742 if (CODE64(s)) {
1743 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1744 } else
1745#endif
1746 {
1747 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1748 }
1749 }
1750}
1751
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0.  For register forms (mod == 3) this is a plain register move;
   for memory forms the effective address is computed first. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1783
1784static inline uint32_t insn_get(DisasContext *s, int ot)
1785{
1786 uint32_t ret;
1787
1788 switch(ot) {
1789 case OT_BYTE:
1790 ret = ldub_code(s->pc);
1791 s->pc++;
1792 break;
1793 case OT_WORD:
1794 ret = lduw_code(s->pc);
1795 s->pc += 2;
1796 break;
1797 default:
1798 case OT_LONG:
1799 ret = ldl_code(s->pc);
1800 s->pc += 4;
1801 break;
1802 }
1803 return ret;
1804}
1805
1806static inline int insn_const_size(unsigned int ot)
1807{
1808 if (ot <= OT_LONG)
1809 return 1 << ot;
1810 else
1811 return 4;
1812}
1813
/* Emit a jump to 'eip', chaining directly to the next translation block
   when the target lies on one of the pages the current TB already spans;
   otherwise fall back to a full end-of-block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) so the main loop can patch the chain */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1838
/* Emit code for a conditional jump.  b encodes the condition (low bit =
   inverted); 'val' is the taken target, 'next_eip' the fall-through.
   When TB chaining is allowed (s->jmp_opt) the condition is evaluated
   with the fastest available op for the current lazy-flags state;
   otherwise the slow generic setcc path is used and the block ends. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* only Z and S can be read directly from the last result;
               (cc_op - CC_OP_ADDB) % 4 recovers the operand size */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast op available: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* for inverted conditions, swap taken and fall-through targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* no chaining: evaluate the condition, set EIP, end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1970
/* Emit code computing condition 'b' into T0 (0 or 1), picking a fast op
   when the lazy flags state allows it, otherwise falling back to the
   generic slow path. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read directly from the last result */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    /* odd condition codes are the negations of the even ones */
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2037
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS.  In protected mode the full
   (possibly faulting) segment load helper is used; in real/vm86 mode
   the selector is loaded directly. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2060
/* Emit code adding 'addend' to the stack pointer, using the width implied
   by the current mode (RSP in 64-bit code, ESP/SP per ss32).  The common
   small addends have dedicated ops. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}
2087
/* generate a push of T0. It depends on ss32, addseg and dflag.
   The store happens before ESP is updated (precise exception support):
   T1 keeps the new offset while A0 holds the segment-adjusted address. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2127
2128/* generate a push. It depends on ss32, addseg and dflag */
2129/* slower version for T1, only used for call Ev */
2130static void gen_push_T1(DisasContext *s)
2131{
2132#ifdef TARGET_X86_64
2133 if (CODE64(s)) {
2134 gen_op_movq_A0_reg[R_ESP]();
2135 if (s->dflag) {
2136 gen_op_subq_A0_8();
2137 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2138 } else {
2139 gen_op_subq_A0_2();
2140 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2141 }
2142 gen_op_movq_ESP_A0();
2143 } else
2144#endif
2145 {
2146 gen_op_movl_A0_reg[R_ESP]();
2147 if (!s->dflag)
2148 gen_op_subl_A0_2();
2149 else
2150 gen_op_subl_A0_4();
2151 if (s->ss32) {
2152 if (s->addseg) {
2153 gen_op_addl_A0_SS();
2154 }
2155 } else {
2156 gen_op_andl_A0_ffff();
2157 gen_op_addl_A0_SS();
2158 }
2159 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2160
2161 if (s->ss32 && !s->addseg)
2162 gen_op_movl_ESP_A0();
2163 else
2164 gen_stack_update(s, (-2) << s->dflag);
2165 }
2166}
2167
/* two step pop is necessary for precise exceptions: this loads the top of
   stack into T0 without modifying ESP; gen_pop_update adjusts ESP after
   the value has been safely consumed. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2189
/* Second half of a pop: advance the stack pointer by the operand size
   (8 in 64-bit mode with 64-bit operands, else 2 or 4 per dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2201
/* Compute the current stack address into A0 (segment-adjusted when
   addseg), keeping the raw offset in T1 for a later ESP write-back. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2211
/* Emit code for PUSHA/PUSHAD: store the 8 GPRs (EAX..EDI, highest
   register first) below ESP, then write the new ESP from T1.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2230
/* Emit code for POPA/POPAD: reload the 8 GPRs from the stack (the saved
   ESP slot is skipped), then set ESP past the popped area.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2252
/* Emit code for the ENTER instruction: push EBP, optionally copy 'level'
   nested frame pointers (via the enter_level helper), set EBP to the new
   frame and reserve 'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* the architectural nesting level is taken modulo 32 */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2300
/* Emit code raising exception 'trapno' at 'cur_eip': flush the lazy flags,
   set EIP and call the raise helper.  Ends the translation block
   (is_jmp = 3 marks "stop translating"). */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2309
/* an interrupt is different from an exception because of the
   priviledge checks.  The instruction length (next_eip - cur_eip) is
   passed so the helper can compute the return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2321
/* Emit code entering the debug exception handler at 'cur_eip' (used for
   breakpoints): flush flags, set EIP and stop translation. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2330
/* generate a generic end of block. Trace exception is also generated
   if needed.  Flags are flushed, the IRQ-inhibit state (after mov ss /
   sti) is cleared, and single-step raises #DB before exiting the TB. */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        /* T0 = 0: no TB chaining from this exit */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2350
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur.  Uses TB chaining when
   allowed, otherwise sets EIP and ends the block. */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2370
/* Convenience wrapper: unconditional jump using TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2375
/* Load the target-width immediate 'val' into T0, using the short 32-bit
   form when the value sign-extends losslessly. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2388
/* Same as gen_movtl_T0_im, but for T1. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2401
/* Add the immediate 'val' to A0 using the address width of the current
   code segment (64-bit add in long mode, else 32-bit). */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2411
/* 64-bit load from guest address A0 into an env field (offset passed as
   the op parameter), indexed by memory access mode (raw/kernel/user). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2419
/* 64-bit store of an env field to guest address A0, indexed by memory
   access mode (raw/kernel/user). */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2427
/* 128-bit (octword/XMM) load from guest address A0 into an env field,
   indexed by memory access mode (raw/kernel/user). */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2435
/* 128-bit (octword/XMM) store of an env field to guest address A0,
   indexed by memory access mode (raw/kernel/user). */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2443
/* Sentinel (not a callable pointer!) marking table entries that need
   dedicated decoding in gen_sse instead of a simple two-operand op. */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* Pair of ops for the MMX (66-less) and SSE (0x66-prefixed) forms. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Four scalar/packed FP variants: ps, pd, ss, sd (indexed by prefix). */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2449
/* Main 0x0F two-byte opcode dispatch for MMX/SSE, indexed by
   [second opcode byte][prefix: 0=none, 1=0x66, 2=0xF3, 3=0xF2].
   NULL = illegal; SSE_SPECIAL = hand-decoded in gen_sse. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2572
/* Immediate-count shift group (opcodes 0x71/0x72/0x73), indexed by
   8 * opcode-row + modrm reg field, then by MMX/XMM form.
   The psrldq/pslldq byte shifts exist only as XMM ops. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2585
/* Scalar int<->float conversions, in three rows of four:
   cvtsi2ss/sd (+64-bit), cvttss/sd2si (+64-bit), cvtss/sd2si (+64-bit).
   The 64-bit variants exist only on TARGET_X86_64 builds. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2602
/* cmpps/cmppd/cmpss/cmpsd predicates (opcode 0xc2), indexed by the
   3-bit immediate comparison code, then by ps/pd/ss/sd form. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2613
2614static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2615{
2616 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2617 int modrm, mod, rm, reg, reg_addr, offset_addr;
2618 GenOpFunc2 *sse_op2;
2619 GenOpFunc3 *sse_op3;
2620
2621 b &= 0xff;
2622 if (s->prefix & PREFIX_DATA)
2623 b1 = 1;
2624 else if (s->prefix & PREFIX_REPZ)
2625 b1 = 2;
2626 else if (s->prefix & PREFIX_REPNZ)
2627 b1 = 3;
2628 else
2629 b1 = 0;
2630 sse_op2 = sse_op_table1[b][b1];
2631 if (!sse_op2)
2632 goto illegal_op;
2633 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2634 is_xmm = 1;
2635 } else {
2636 if (b1 == 0) {
2637 /* MMX case */
2638 is_xmm = 0;
2639 } else {
2640 is_xmm = 1;
2641 }
2642 }
2643 /* simple MMX/SSE operation */
2644 if (s->flags & HF_TS_MASK) {
2645 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2646 return;
2647 }
2648 if (s->flags & HF_EM_MASK) {
2649 illegal_op:
2650 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2651 return;
2652 }
2653 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2654 goto illegal_op;
2655 if (b == 0x77) {
2656 /* emms */
2657 gen_op_emms();
2658 return;
2659 }
2660 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2661 the static cpu state) */
2662 if (!is_xmm) {
2663 gen_op_enter_mmx();
2664 }
2665
2666 modrm = ldub_code(s->pc++);
2667 reg = ((modrm >> 3) & 7);
2668 if (is_xmm)
2669 reg |= rex_r;
2670 mod = (modrm >> 6) & 3;
2671 if (sse_op2 == SSE_SPECIAL) {
2672 b |= (b1 << 8);
2673 switch(b) {
2674 case 0x0e7: /* movntq */
2675 if (mod == 3)
2676 goto illegal_op;
2677 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2678 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2679 break;
2680 case 0x1e7: /* movntdq */
2681 case 0x02b: /* movntps */
2682 case 0x12b: /* movntps */
2683 case 0x3f0: /* lddqu */
2684 if (mod == 3)
2685 goto illegal_op;
2686 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2687 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2688 break;
2689 case 0x6e: /* movd mm, ea */
2690#ifdef TARGET_X86_64
2691 if (s->dflag == 2) {
2692 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2693 gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2694 } else
2695#endif
2696 {
2697 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2698 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2699 }
2700 break;
2701 case 0x16e: /* movd xmm, ea */
2702#ifdef TARGET_X86_64
2703 if (s->dflag == 2) {
2704 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2705 gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2706 } else
2707#endif
2708 {
2709 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2710 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2711 }
2712 break;
2713 case 0x6f: /* movq mm, ea */
2714 if (mod != 3) {
2715 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2716 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2717 } else {
2718 rm = (modrm & 7);
2719 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2720 offsetof(CPUX86State,fpregs[rm].mmx));
2721 }
2722 break;
2723 case 0x010: /* movups */
2724 case 0x110: /* movupd */
2725 case 0x028: /* movaps */
2726 case 0x128: /* movapd */
2727 case 0x16f: /* movdqa xmm, ea */
2728 case 0x26f: /* movdqu xmm, ea */
2729 if (mod != 3) {
2730 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2731 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2732 } else {
2733 rm = (modrm & 7) | REX_B(s);
2734 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2735 offsetof(CPUX86State,xmm_regs[rm]));
2736 }
2737 break;
2738 case 0x210: /* movss xmm, ea */
2739 if (mod != 3) {
2740 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2741 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2742 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2743 gen_op_movl_T0_0();
2744 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2745 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2746 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2747 } else {
2748 rm = (modrm & 7) | REX_B(s);
2749 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2750 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2751 }
2752 break;
2753 case 0x310: /* movsd xmm, ea */
2754 if (mod != 3) {
2755 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2756 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2757 gen_op_movl_T0_0();
2758 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2759 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2760 } else {
2761 rm = (modrm & 7) | REX_B(s);
2762 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2763 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2764 }
2765 break;
2766 case 0x012: /* movlps */
2767 case 0x112: /* movlpd */
2768 if (mod != 3) {
2769 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2770 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2771 } else {
2772 /* movhlps */
2773 rm = (modrm & 7) | REX_B(s);
2774 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2775 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2776 }
2777 break;
2778 case 0x212: /* movsldup */
2779 if (mod != 3) {
2780 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2781 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2782 } else {
2783 rm = (modrm & 7) | REX_B(s);
2784 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2785 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2786 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2787 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2788 }
2789 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2790 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2791 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2792 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2793 break;
2794 case 0x312: /* movddup */
2795 if (mod != 3) {
2796 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2797 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2798 } else {
2799 rm = (modrm & 7) | REX_B(s);
2800 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2801 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2802 }
2803 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2804 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2805 break;
2806 case 0x016: /* movhps */
2807 case 0x116: /* movhpd */
2808 if (mod != 3) {
2809 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2810 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2811 } else {
2812 /* movlhps */
2813 rm = (modrm & 7) | REX_B(s);
2814 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2815 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2816 }
2817 break;
2818 case 0x216: /* movshdup */
2819 if (mod != 3) {
2820 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2821 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2822 } else {
2823 rm = (modrm & 7) | REX_B(s);
2824 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2825 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2826 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2827 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2828 }
2829 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2830 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2831 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2832 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2833 break;
2834 case 0x7e: /* movd ea, mm */
2835#ifdef TARGET_X86_64
2836 if (s->dflag == 2) {
2837 gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2838 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2839 } else
2840#endif
2841 {
2842 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2843 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2844 }
2845 break;
2846 case 0x17e: /* movd ea, xmm */
2847#ifdef TARGET_X86_64
2848 if (s->dflag == 2) {
2849 gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2850 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2851 } else
2852#endif
2853 {
2854 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2855 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2856 }
2857 break;
2858 case 0x27e: /* movq xmm, ea */
2859 if (mod != 3) {
2860 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2861 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2862 } else {
2863 rm = (modrm & 7) | REX_B(s);
2864 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2865 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2866 }
2867 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2868 break;
2869 case 0x7f: /* movq ea, mm */
2870 if (mod != 3) {
2871 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2872 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2873 } else {
2874 rm = (modrm & 7);
2875 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2876 offsetof(CPUX86State,fpregs[reg].mmx));
2877 }
2878 break;
2879 case 0x011: /* movups */
2880 case 0x111: /* movupd */
2881 case 0x029: /* movaps */
2882 case 0x129: /* movapd */
2883 case 0x17f: /* movdqa ea, xmm */
2884 case 0x27f: /* movdqu ea, xmm */
2885 if (mod != 3) {
2886 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2887 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2888 } else {
2889 rm = (modrm & 7) | REX_B(s);
2890 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2891 offsetof(CPUX86State,xmm_regs[reg]));
2892 }
2893 break;
2894 case 0x211: /* movss ea, xmm */
2895 if (mod != 3) {
2896 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2897 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2898 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2899 } else {
2900 rm = (modrm & 7) | REX_B(s);
2901 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2902 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2903 }
2904 break;
2905 case 0x311: /* movsd ea, xmm */
2906 if (mod != 3) {
2907 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2908 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2909 } else {
2910 rm = (modrm & 7) | REX_B(s);
2911 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2912 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2913 }
2914 break;
2915 case 0x013: /* movlps */
2916 case 0x113: /* movlpd */
2917 if (mod != 3) {
2918 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2919 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2920 } else {
2921 goto illegal_op;
2922 }
2923 break;
2924 case 0x017: /* movhps */
2925 case 0x117: /* movhpd */
2926 if (mod != 3) {
2927 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2928 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2929 } else {
2930 goto illegal_op;
2931 }
2932 break;
2933 case 0x71: /* shift mm, im */
2934 case 0x72:
2935 case 0x73:
2936 case 0x171: /* shift xmm, im */
2937 case 0x172:
2938 case 0x173:
2939 val = ldub_code(s->pc++);
2940 if (is_xmm) {
2941 gen_op_movl_T0_im(val);
2942 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2943 gen_op_movl_T0_0();
2944 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2945 op1_offset = offsetof(CPUX86State,xmm_t0);
2946 } else {
2947 gen_op_movl_T0_im(val);
2948 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2949 gen_op_movl_T0_0();
2950 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2951 op1_offset = offsetof(CPUX86State,mmx_t0);
2952 }
2953 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2954 if (!sse_op2)
2955 goto illegal_op;
2956 if (is_xmm) {
2957 rm = (modrm & 7) | REX_B(s);
2958 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2959 } else {
2960 rm = (modrm & 7);
2961 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2962 }
2963 sse_op2(op2_offset, op1_offset);
2964 break;
2965 case 0x050: /* movmskps */
2966 rm = (modrm & 7) | REX_B(s);
2967 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2968 gen_op_mov_reg_T0[OT_LONG][reg]();
2969 break;
2970 case 0x150: /* movmskpd */
2971 rm = (modrm & 7) | REX_B(s);
2972 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2973 gen_op_mov_reg_T0[OT_LONG][reg]();
2974 break;
2975 case 0x02a: /* cvtpi2ps */
2976 case 0x12a: /* cvtpi2pd */
2977 gen_op_enter_mmx();
2978 if (mod != 3) {
2979 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2980 op2_offset = offsetof(CPUX86State,mmx_t0);
2981 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2982 } else {
2983 rm = (modrm & 7);
2984 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2985 }
2986 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2987 switch(b >> 8) {
2988 case 0x0:
2989 gen_op_cvtpi2ps(op1_offset, op2_offset);
2990 break;
2991 default:
2992 case 0x1:
2993 gen_op_cvtpi2pd(op1_offset, op2_offset);
2994 break;
2995 }
2996 break;
2997 case 0x22a: /* cvtsi2ss */
2998 case 0x32a: /* cvtsi2sd */
2999 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3000 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3001 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3002 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
3003 break;
3004 case 0x02c: /* cvttps2pi */
3005 case 0x12c: /* cvttpd2pi */
3006 case 0x02d: /* cvtps2pi */
3007 case 0x12d: /* cvtpd2pi */
3008 gen_op_enter_mmx();
3009 if (mod != 3) {
3010 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3011 op2_offset = offsetof(CPUX86State,xmm_t0);
3012 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3013 } else {
3014 rm = (modrm & 7) | REX_B(s);
3015 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3016 }
3017 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3018 switch(b) {
3019 case 0x02c:
3020 gen_op_cvttps2pi(op1_offset, op2_offset);
3021 break;
3022 case 0x12c:
3023 gen_op_cvttpd2pi(op1_offset, op2_offset);
3024 break;
3025 case 0x02d:
3026 gen_op_cvtps2pi(op1_offset, op2_offset);
3027 break;
3028 case 0x12d:
3029 gen_op_cvtpd2pi(op1_offset, op2_offset);
3030 break;
3031 }
3032 break;
3033 case 0x22c: /* cvttss2si */
3034 case 0x32c: /* cvttsd2si */
3035 case 0x22d: /* cvtss2si */
3036 case 0x32d: /* cvtsd2si */
3037 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3038 if (mod != 3) {
3039 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3040 if ((b >> 8) & 1) {
3041 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3042 } else {
3043 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3044 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3045 }
3046 op2_offset = offsetof(CPUX86State,xmm_t0);
3047 } else {
3048 rm = (modrm & 7) | REX_B(s);
3049 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3050 }
3051 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3052 (b & 1) * 4](op2_offset);
3053 gen_op_mov_reg_T0[ot][reg]();
3054 break;
3055 case 0xc4: /* pinsrw */
3056 case 0x1c4:
3057 s->rip_offset = 1;
3058 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3059 val = ldub_code(s->pc++);
3060 if (b1) {
3061 val &= 7;
3062 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3063 } else {
3064 val &= 3;
3065 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3066 }
3067 break;
3068 case 0xc5: /* pextrw */
3069 case 0x1c5:
3070 if (mod != 3)
3071 goto illegal_op;
3072 val = ldub_code(s->pc++);
3073 if (b1) {
3074 val &= 7;
3075 rm = (modrm & 7) | REX_B(s);
3076 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3077 } else {
3078 val &= 3;
3079 rm = (modrm & 7);
3080 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3081 }
3082 reg = ((modrm >> 3) & 7) | rex_r;
3083 gen_op_mov_reg_T0[OT_LONG][reg]();
3084 break;
3085 case 0x1d6: /* movq ea, xmm */
3086 if (mod != 3) {
3087 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3088 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3089 } else {
3090 rm = (modrm & 7) | REX_B(s);
3091 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3092 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3093 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3094 }
3095 break;
3096 case 0x2d6: /* movq2dq */
3097 gen_op_enter_mmx();
3098 rm = (modrm & 7);
3099 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3100 offsetof(CPUX86State,fpregs[rm].mmx));
3101 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3102 break;
3103 case 0x3d6: /* movdq2q */
3104 gen_op_enter_mmx();
3105 rm = (modrm & 7) | REX_B(s);
3106 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3107 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3108 break;
3109 case 0xd7: /* pmovmskb */
3110 case 0x1d7:
3111 if (mod != 3)
3112 goto illegal_op;
3113 if (b1) {
3114 rm = (modrm & 7) | REX_B(s);
3115 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3116 } else {
3117 rm = (modrm & 7);
3118 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3119 }
3120 reg = ((modrm >> 3) & 7) | rex_r;
3121 gen_op_mov_reg_T0[OT_LONG][reg]();
3122 break;
3123 default:
3124 goto illegal_op;
3125 }
3126 } else {
3127 /* generic MMX or SSE operation */
3128 switch(b) {
3129 case 0xf7:
3130 /* maskmov : we must prepare A0 */
3131 if (mod != 3)
3132 goto illegal_op;
3133#ifdef TARGET_X86_64
3134 if (s->aflag == 2) {
3135 gen_op_movq_A0_reg[R_EDI]();
3136 } else
3137#endif
3138 {
3139 gen_op_movl_A0_reg[R_EDI]();
3140 if (s->aflag == 0)
3141 gen_op_andl_A0_ffff();
3142 }
3143 gen_add_A0_ds_seg(s);
3144 break;
3145 case 0x70: /* pshufx insn */
3146 case 0xc6: /* pshufx insn */
3147 case 0xc2: /* compare insns */
3148 s->rip_offset = 1;
3149 break;
3150 default:
3151 break;
3152 }
3153 if (is_xmm) {
3154 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3155 if (mod != 3) {
3156 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3157 op2_offset = offsetof(CPUX86State,xmm_t0);
3158 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3159 b == 0xc2)) {
3160 /* specific case for SSE single instructions */
3161 if (b1 == 2) {
3162 /* 32 bit access */
3163 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3164 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3165 } else {
3166 /* 64 bit access */
3167 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3168 }
3169 } else {
3170 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3171 }
3172 } else {
3173 rm = (modrm & 7) | REX_B(s);
3174 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3175 }
3176 } else {
3177 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3178 if (mod != 3) {
3179 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3180 op2_offset = offsetof(CPUX86State,mmx_t0);
3181 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3182 } else {
3183 rm = (modrm & 7);
3184 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3185 }
3186 }
3187 switch(b) {
3188 case 0x70: /* pshufx insn */
3189 case 0xc6: /* pshufx insn */
3190 val = ldub_code(s->pc++);
3191 sse_op3 = (GenOpFunc3 *)sse_op2;
3192 sse_op3(op1_offset, op2_offset, val);
3193 break;
3194 case 0xc2:
3195 /* compare insns */
3196 val = ldub_code(s->pc++);
3197 if (val >= 8)
3198 goto illegal_op;
3199 sse_op2 = sse_op_table4[val][b1];
3200 sse_op2(op1_offset, op2_offset);
3201 break;
3202 default:
3203 sse_op2(op1_offset, op2_offset);
3204 break;
3205 }
3206 if (b == 0x2e || b == 0x2f) {
3207 s->cc_op = CC_OP_EFLAGS;
3208 }
3209 }
3210}
3211
3212
3213/* convert one instruction. s->is_jmp is set if the translation must
3214 be stopped. Return the next pc value */
3215static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3216{
3217 int b, prefixes, aflag, dflag;
3218 int shift, ot;
3219 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3220 target_ulong next_eip, tval;
3221 int rex_w, rex_r;
3222
3223 s->pc = pc_start;
3224 prefixes = 0;
3225 aflag = s->code32;
3226 dflag = s->code32;
3227 s->override = -1;
3228 rex_w = -1;
3229 rex_r = 0;
3230#ifdef TARGET_X86_64
3231 s->rex_x = 0;
3232 s->rex_b = 0;
3233 x86_64_hregs = 0;
3234#endif
3235 s->rip_offset = 0; /* for relative ip address */
3236
3237#ifdef VBOX
3238 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
3239 gen_update_eip(pc_start - s->cs_base);
3240#endif
3241
3242 next_byte:
3243 b = ldub_code(s->pc);
3244 s->pc++;
3245 /* check prefixes */
3246#ifdef TARGET_X86_64
3247 if (CODE64(s)) {
3248 switch (b) {
3249 case 0xf3:
3250 prefixes |= PREFIX_REPZ;
3251 goto next_byte;
3252 case 0xf2:
3253 prefixes |= PREFIX_REPNZ;
3254 goto next_byte;
3255 case 0xf0:
3256 prefixes |= PREFIX_LOCK;
3257 goto next_byte;
3258 case 0x2e:
3259 s->override = R_CS;
3260 goto next_byte;
3261 case 0x36:
3262 s->override = R_SS;
3263 goto next_byte;
3264 case 0x3e:
3265 s->override = R_DS;
3266 goto next_byte;
3267 case 0x26:
3268 s->override = R_ES;
3269 goto next_byte;
3270 case 0x64:
3271 s->override = R_FS;
3272 goto next_byte;
3273 case 0x65:
3274 s->override = R_GS;
3275 goto next_byte;
3276 case 0x66:
3277 prefixes |= PREFIX_DATA;
3278 goto next_byte;
3279 case 0x67:
3280 prefixes |= PREFIX_ADR;
3281 goto next_byte;
3282 case 0x40 ... 0x4f:
3283 /* REX prefix */
3284 rex_w = (b >> 3) & 1;
3285 rex_r = (b & 0x4) << 1;
3286 s->rex_x = (b & 0x2) << 2;
3287 REX_B(s) = (b & 0x1) << 3;
3288 x86_64_hregs = 1; /* select uniform byte register addressing */
3289 goto next_byte;
3290 }
3291 if (rex_w == 1) {
3292 /* 0x66 is ignored if rex.w is set */
3293 dflag = 2;
3294 } else {
3295 if (prefixes & PREFIX_DATA)
3296 dflag ^= 1;
3297 }
3298 if (!(prefixes & PREFIX_ADR))
3299 aflag = 2;
3300 } else
3301#endif
3302 {
3303 switch (b) {
3304 case 0xf3:
3305 prefixes |= PREFIX_REPZ;
3306 goto next_byte;
3307 case 0xf2:
3308 prefixes |= PREFIX_REPNZ;
3309 goto next_byte;
3310 case 0xf0:
3311 prefixes |= PREFIX_LOCK;
3312 goto next_byte;
3313 case 0x2e:
3314 s->override = R_CS;
3315 goto next_byte;
3316 case 0x36:
3317 s->override = R_SS;
3318 goto next_byte;
3319 case 0x3e:
3320 s->override = R_DS;
3321 goto next_byte;
3322 case 0x26:
3323 s->override = R_ES;
3324 goto next_byte;
3325 case 0x64:
3326 s->override = R_FS;
3327 goto next_byte;
3328 case 0x65:
3329 s->override = R_GS;
3330 goto next_byte;
3331 case 0x66:
3332 prefixes |= PREFIX_DATA;
3333 goto next_byte;
3334 case 0x67:
3335 prefixes |= PREFIX_ADR;
3336 goto next_byte;
3337 }
3338 if (prefixes & PREFIX_DATA)
3339 dflag ^= 1;
3340 if (prefixes & PREFIX_ADR)
3341 aflag ^= 1;
3342 }
3343
3344 s->prefix = prefixes;
3345 s->aflag = aflag;
3346 s->dflag = dflag;
3347
3348 /* lock generation */
3349 if (prefixes & PREFIX_LOCK)
3350 gen_op_lock();
3351
3352 /* now check op code */
3353 reswitch:
3354 switch(b) {
3355 case 0x0f:
3356 /**************************/
3357 /* extended op code */
3358 b = ldub_code(s->pc++) | 0x100;
3359 goto reswitch;
3360
3361 /**************************/
3362 /* arith & logic */
3363 case 0x00 ... 0x05:
3364 case 0x08 ... 0x0d:
3365 case 0x10 ... 0x15:
3366 case 0x18 ... 0x1d:
3367 case 0x20 ... 0x25:
3368 case 0x28 ... 0x2d:
3369 case 0x30 ... 0x35:
3370 case 0x38 ... 0x3d:
3371 {
3372 int op, f, val;
3373 op = (b >> 3) & 7;
3374 f = (b >> 1) & 3;
3375
3376 if ((b & 1) == 0)
3377 ot = OT_BYTE;
3378 else
3379 ot = dflag + OT_WORD;
3380
3381 switch(f) {
3382 case 0: /* OP Ev, Gv */
3383 modrm = ldub_code(s->pc++);
3384 reg = ((modrm >> 3) & 7) | rex_r;
3385 mod = (modrm >> 6) & 3;
3386 rm = (modrm & 7) | REX_B(s);
3387 if (mod != 3) {
3388 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3389 opreg = OR_TMP0;
3390 } else if (op == OP_XORL && rm == reg) {
3391 xor_zero:
3392 /* xor reg, reg optimisation */
3393 gen_op_movl_T0_0();
3394 s->cc_op = CC_OP_LOGICB + ot;
3395 gen_op_mov_reg_T0[ot][reg]();
3396 gen_op_update1_cc();
3397 break;
3398 } else {
3399 opreg = rm;
3400 }
3401 gen_op_mov_TN_reg[ot][1][reg]();
3402 gen_op(s, op, ot, opreg);
3403 break;
3404 case 1: /* OP Gv, Ev */
3405 modrm = ldub_code(s->pc++);
3406 mod = (modrm >> 6) & 3;
3407 reg = ((modrm >> 3) & 7) | rex_r;
3408 rm = (modrm & 7) | REX_B(s);
3409 if (mod != 3) {
3410 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3411 gen_op_ld_T1_A0[ot + s->mem_index]();
3412 } else if (op == OP_XORL && rm == reg) {
3413 goto xor_zero;
3414 } else {
3415 gen_op_mov_TN_reg[ot][1][rm]();
3416 }
3417 gen_op(s, op, ot, reg);
3418 break;
3419 case 2: /* OP A, Iv */
3420 val = insn_get(s, ot);
3421 gen_op_movl_T1_im(val);
3422 gen_op(s, op, ot, OR_EAX);
3423 break;
3424 }
3425 }
3426 break;
3427
3428 case 0x80: /* GRP1 */
3429 case 0x81:
3430 case 0x82:
3431 case 0x83:
3432 {
3433 int val;
3434
3435 if ((b & 1) == 0)
3436 ot = OT_BYTE;
3437 else
3438 ot = dflag + OT_WORD;
3439
3440 modrm = ldub_code(s->pc++);
3441 mod = (modrm >> 6) & 3;
3442 rm = (modrm & 7) | REX_B(s);
3443 op = (modrm >> 3) & 7;
3444
3445 if (mod != 3) {
3446 if (b == 0x83)
3447 s->rip_offset = 1;
3448 else
3449 s->rip_offset = insn_const_size(ot);
3450 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3451 opreg = OR_TMP0;
3452 } else {
3453 opreg = rm;
3454 }
3455
3456 switch(b) {
3457 default:
3458 case 0x80:
3459 case 0x81:
3460 case 0x82:
3461 val = insn_get(s, ot);
3462 break;
3463 case 0x83:
3464 val = (int8_t)insn_get(s, OT_BYTE);
3465 break;
3466 }
3467 gen_op_movl_T1_im(val);
3468 gen_op(s, op, ot, opreg);
3469 }
3470 break;
3471
3472 /**************************/
3473 /* inc, dec, and other misc arith */
3474 case 0x40 ... 0x47: /* inc Gv */
3475 ot = dflag ? OT_LONG : OT_WORD;
3476 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3477 break;
3478 case 0x48 ... 0x4f: /* dec Gv */
3479 ot = dflag ? OT_LONG : OT_WORD;
3480 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3481 break;
3482 case 0xf6: /* GRP3 */
3483 case 0xf7:
3484 if ((b & 1) == 0)
3485 ot = OT_BYTE;
3486 else
3487 ot = dflag + OT_WORD;
3488
3489 modrm = ldub_code(s->pc++);
3490 mod = (modrm >> 6) & 3;
3491 rm = (modrm & 7) | REX_B(s);
3492 op = (modrm >> 3) & 7;
3493 if (mod != 3) {
3494 if (op == 0)
3495 s->rip_offset = insn_const_size(ot);
3496 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3497 gen_op_ld_T0_A0[ot + s->mem_index]();
3498 } else {
3499 gen_op_mov_TN_reg[ot][0][rm]();
3500 }
3501
3502 switch(op) {
3503 case 0: /* test */
3504 val = insn_get(s, ot);
3505 gen_op_movl_T1_im(val);
3506 gen_op_testl_T0_T1_cc();
3507 s->cc_op = CC_OP_LOGICB + ot;
3508 break;
3509 case 2: /* not */
3510 gen_op_notl_T0();
3511 if (mod != 3) {
3512 gen_op_st_T0_A0[ot + s->mem_index]();
3513 } else {
3514 gen_op_mov_reg_T0[ot][rm]();
3515 }
3516 break;
3517 case 3: /* neg */
3518 gen_op_negl_T0();
3519 if (mod != 3) {
3520 gen_op_st_T0_A0[ot + s->mem_index]();
3521 } else {
3522 gen_op_mov_reg_T0[ot][rm]();
3523 }
3524 gen_op_update_neg_cc();
3525 s->cc_op = CC_OP_SUBB + ot;
3526 break;
3527 case 4: /* mul */
3528 switch(ot) {
3529 case OT_BYTE:
3530 gen_op_mulb_AL_T0();
3531 s->cc_op = CC_OP_MULB;
3532 break;
3533 case OT_WORD:
3534 gen_op_mulw_AX_T0();
3535 s->cc_op = CC_OP_MULW;
3536 break;
3537 default:
3538 case OT_LONG:
3539 gen_op_mull_EAX_T0();
3540 s->cc_op = CC_OP_MULL;
3541 break;
3542#ifdef TARGET_X86_64
3543 case OT_QUAD:
3544 gen_op_mulq_EAX_T0();
3545 s->cc_op = CC_OP_MULQ;
3546 break;
3547#endif
3548 }
3549 break;
3550 case 5: /* imul */
3551 switch(ot) {
3552 case OT_BYTE:
3553 gen_op_imulb_AL_T0();
3554 s->cc_op = CC_OP_MULB;
3555 break;
3556 case OT_WORD:
3557 gen_op_imulw_AX_T0();
3558 s->cc_op = CC_OP_MULW;
3559 break;
3560 default:
3561 case OT_LONG:
3562 gen_op_imull_EAX_T0();
3563 s->cc_op = CC_OP_MULL;
3564 break;
3565#ifdef TARGET_X86_64
3566 case OT_QUAD:
3567 gen_op_imulq_EAX_T0();
3568 s->cc_op = CC_OP_MULQ;
3569 break;
3570#endif
3571 }
3572 break;
3573 case 6: /* div */
3574 switch(ot) {
3575 case OT_BYTE:
3576 gen_jmp_im(pc_start - s->cs_base);
3577 gen_op_divb_AL_T0();
3578 break;
3579 case OT_WORD:
3580 gen_jmp_im(pc_start - s->cs_base);
3581 gen_op_divw_AX_T0();
3582 break;
3583 default:
3584 case OT_LONG:
3585 gen_jmp_im(pc_start - s->cs_base);
3586 gen_op_divl_EAX_T0();
3587 break;
3588#ifdef TARGET_X86_64
3589 case OT_QUAD:
3590 gen_jmp_im(pc_start - s->cs_base);
3591 gen_op_divq_EAX_T0();
3592 break;
3593#endif
3594 }
3595 break;
3596 case 7: /* idiv */
3597 switch(ot) {
3598 case OT_BYTE:
3599 gen_jmp_im(pc_start - s->cs_base);
3600 gen_op_idivb_AL_T0();
3601 break;
3602 case OT_WORD:
3603 gen_jmp_im(pc_start - s->cs_base);
3604 gen_op_idivw_AX_T0();
3605 break;
3606 default:
3607 case OT_LONG:
3608 gen_jmp_im(pc_start - s->cs_base);
3609 gen_op_idivl_EAX_T0();
3610 break;
3611#ifdef TARGET_X86_64
3612 case OT_QUAD:
3613 gen_jmp_im(pc_start - s->cs_base);
3614 gen_op_idivq_EAX_T0();
3615 break;
3616#endif
3617 }
3618 break;
3619 default:
3620 goto illegal_op;
3621 }
3622 break;
3623
3624 case 0xfe: /* GRP4 */
3625 case 0xff: /* GRP5 */
3626 if ((b & 1) == 0)
3627 ot = OT_BYTE;
3628 else
3629 ot = dflag + OT_WORD;
3630
3631 modrm = ldub_code(s->pc++);
3632 mod = (modrm >> 6) & 3;
3633 rm = (modrm & 7) | REX_B(s);
3634 op = (modrm >> 3) & 7;
3635 if (op >= 2 && b == 0xfe) {
3636 goto illegal_op;
3637 }
3638 if (CODE64(s)) {
3639 if (op == 2 || op == 4) {
3640 /* operand size for jumps is 64 bit */
3641 ot = OT_QUAD;
3642 } else if (op == 3 || op == 5) {
3643 /* for call calls, the operand is 16 or 32 bit, even
3644 in long mode */
3645 ot = dflag ? OT_LONG : OT_WORD;
3646 } else if (op == 6) {
3647 /* default push size is 64 bit */
3648 ot = dflag ? OT_QUAD : OT_WORD;
3649 }
3650 }
3651 if (mod != 3) {
3652 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3653 if (op >= 2 && op != 3 && op != 5)
3654 gen_op_ld_T0_A0[ot + s->mem_index]();
3655 } else {
3656 gen_op_mov_TN_reg[ot][0][rm]();
3657 }
3658
3659 switch(op) {
3660 case 0: /* inc Ev */
3661 if (mod != 3)
3662 opreg = OR_TMP0;
3663 else
3664 opreg = rm;
3665 gen_inc(s, ot, opreg, 1);
3666 break;
3667 case 1: /* dec Ev */
3668 if (mod != 3)
3669 opreg = OR_TMP0;
3670 else
3671 opreg = rm;
3672 gen_inc(s, ot, opreg, -1);
3673 break;
3674 case 2: /* call Ev */
3675 /* XXX: optimize if memory (no 'and' is necessary) */
3676 if (s->dflag == 0)
3677 gen_op_andl_T0_ffff();
3678 next_eip = s->pc - s->cs_base;
3679 gen_movtl_T1_im(next_eip);
3680 gen_push_T1(s);
3681 gen_op_jmp_T0();
3682 gen_eob(s);
3683 break;
3684 case 3: /* lcall Ev */
3685 gen_op_ld_T1_A0[ot + s->mem_index]();
3686 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3687 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3688 do_lcall:
3689 if (s->pe && !s->vm86) {
3690 if (s->cc_op != CC_OP_DYNAMIC)
3691 gen_op_set_cc_op(s->cc_op);
3692 gen_jmp_im(pc_start - s->cs_base);
3693 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3694 } else {
3695 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3696 }
3697 gen_eob(s);
3698 break;
3699 case 4: /* jmp Ev */
3700 if (s->dflag == 0)
3701 gen_op_andl_T0_ffff();
3702 gen_op_jmp_T0();
3703 gen_eob(s);
3704 break;
3705 case 5: /* ljmp Ev */
3706 gen_op_ld_T1_A0[ot + s->mem_index]();
3707 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3708 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3709 do_ljmp:
3710 if (s->pe && !s->vm86) {
3711 if (s->cc_op != CC_OP_DYNAMIC)
3712 gen_op_set_cc_op(s->cc_op);
3713 gen_jmp_im(pc_start - s->cs_base);
3714 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3715 } else {
3716 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3717 gen_op_movl_T0_T1();
3718 gen_op_jmp_T0();
3719 }
3720 gen_eob(s);
3721 break;
3722 case 6: /* push Ev */
3723 gen_push_T0(s);
3724 break;
3725 default:
3726 goto illegal_op;
3727 }
3728 break;
3729
3730 case 0x84: /* test Ev, Gv */
3731 case 0x85:
3732 if ((b & 1) == 0)
3733 ot = OT_BYTE;
3734 else
3735 ot = dflag + OT_WORD;
3736
3737 modrm = ldub_code(s->pc++);
3738 mod = (modrm >> 6) & 3;
3739 rm = (modrm & 7) | REX_B(s);
3740 reg = ((modrm >> 3) & 7) | rex_r;
3741
3742 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3743 gen_op_mov_TN_reg[ot][1][reg]();
3744 gen_op_testl_T0_T1_cc();
3745 s->cc_op = CC_OP_LOGICB + ot;
3746 break;
3747
3748 case 0xa8: /* test eAX, Iv */
3749 case 0xa9:
3750 if ((b & 1) == 0)
3751 ot = OT_BYTE;
3752 else
3753 ot = dflag + OT_WORD;
3754 val = insn_get(s, ot);
3755
3756 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3757 gen_op_movl_T1_im(val);
3758 gen_op_testl_T0_T1_cc();
3759 s->cc_op = CC_OP_LOGICB + ot;
3760 break;
3761
3762 case 0x98: /* CWDE/CBW */
3763#ifdef TARGET_X86_64
3764 if (dflag == 2) {
3765 gen_op_movslq_RAX_EAX();
3766 } else
3767#endif
3768 if (dflag == 1)
3769 gen_op_movswl_EAX_AX();
3770 else
3771 gen_op_movsbw_AX_AL();
3772 break;
3773 case 0x99: /* CDQ/CWD */
3774#ifdef TARGET_X86_64
3775 if (dflag == 2) {
3776 gen_op_movsqo_RDX_RAX();
3777 } else
3778#endif
3779 if (dflag == 1)
3780 gen_op_movslq_EDX_EAX();
3781 else
3782 gen_op_movswl_DX_AX();
3783 break;
3784 case 0x1af: /* imul Gv, Ev */
3785 case 0x69: /* imul Gv, Ev, I */
3786 case 0x6b:
3787 ot = dflag + OT_WORD;
3788 modrm = ldub_code(s->pc++);
3789 reg = ((modrm >> 3) & 7) | rex_r;
3790 if (b == 0x69)
3791 s->rip_offset = insn_const_size(ot);
3792 else if (b == 0x6b)
3793 s->rip_offset = 1;
3794 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3795 if (b == 0x69) {
3796 val = insn_get(s, ot);
3797 gen_op_movl_T1_im(val);
3798 } else if (b == 0x6b) {
3799 val = (int8_t)insn_get(s, OT_BYTE);
3800 gen_op_movl_T1_im(val);
3801 } else {
3802 gen_op_mov_TN_reg[ot][1][reg]();
3803 }
3804
3805#ifdef TARGET_X86_64
3806 if (ot == OT_QUAD) {
3807 gen_op_imulq_T0_T1();
3808 } else
3809#endif
3810 if (ot == OT_LONG) {
3811 gen_op_imull_T0_T1();
3812 } else {
3813 gen_op_imulw_T0_T1();
3814 }
3815 gen_op_mov_reg_T0[ot][reg]();
3816 s->cc_op = CC_OP_MULB + ot;
3817 break;
3818 case 0x1c0:
3819 case 0x1c1: /* xadd Ev, Gv */
3820 if ((b & 1) == 0)
3821 ot = OT_BYTE;
3822 else
3823 ot = dflag + OT_WORD;
3824 modrm = ldub_code(s->pc++);
3825 reg = ((modrm >> 3) & 7) | rex_r;
3826 mod = (modrm >> 6) & 3;
3827 if (mod == 3) {
3828 rm = (modrm & 7) | REX_B(s);
3829 gen_op_mov_TN_reg[ot][0][reg]();
3830 gen_op_mov_TN_reg[ot][1][rm]();
3831 gen_op_addl_T0_T1();
3832 gen_op_mov_reg_T1[ot][reg]();
3833 gen_op_mov_reg_T0[ot][rm]();
3834 } else {
3835 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3836 gen_op_mov_TN_reg[ot][0][reg]();
3837 gen_op_ld_T1_A0[ot + s->mem_index]();
3838 gen_op_addl_T0_T1();
3839 gen_op_st_T0_A0[ot + s->mem_index]();
3840 gen_op_mov_reg_T1[ot][reg]();
3841 }
3842 gen_op_update2_cc();
3843 s->cc_op = CC_OP_ADDB + ot;
3844 break;
3845 case 0x1b0:
3846 case 0x1b1: /* cmpxchg Ev, Gv */
3847 if ((b & 1) == 0)
3848 ot = OT_BYTE;
3849 else
3850 ot = dflag + OT_WORD;
3851 modrm = ldub_code(s->pc++);
3852 reg = ((modrm >> 3) & 7) | rex_r;
3853 mod = (modrm >> 6) & 3;
3854 gen_op_mov_TN_reg[ot][1][reg]();
3855 if (mod == 3) {
3856 rm = (modrm & 7) | REX_B(s);
3857 gen_op_mov_TN_reg[ot][0][rm]();
3858 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3859 gen_op_mov_reg_T0[ot][rm]();
3860 } else {
3861 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3862 gen_op_ld_T0_A0[ot + s->mem_index]();
3863 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3864 }
3865 s->cc_op = CC_OP_SUBB + ot;
3866 break;
3867 case 0x1c7: /* cmpxchg8b */
3868 modrm = ldub_code(s->pc++);
3869 mod = (modrm >> 6) & 3;
3870 if (mod == 3)
3871 goto illegal_op;
3872 if (s->cc_op != CC_OP_DYNAMIC)
3873 gen_op_set_cc_op(s->cc_op);
3874 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3875 gen_op_cmpxchg8b();
3876 s->cc_op = CC_OP_EFLAGS;
3877 break;
3878
3879 /**************************/
3880 /* push/pop */
3881 case 0x50 ... 0x57: /* push */
3882 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3883 gen_push_T0(s);
3884 break;
3885 case 0x58 ... 0x5f: /* pop */
3886 if (CODE64(s)) {
3887 ot = dflag ? OT_QUAD : OT_WORD;
3888 } else {
3889 ot = dflag + OT_WORD;
3890 }
3891 gen_pop_T0(s);
3892 /* NOTE: order is important for pop %sp */
3893 gen_pop_update(s);
3894 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3895 break;
3896 case 0x60: /* pusha */
3897 if (CODE64(s))
3898 goto illegal_op;
3899 gen_pusha(s);
3900 break;
3901 case 0x61: /* popa */
3902 if (CODE64(s))
3903 goto illegal_op;
3904 gen_popa(s);
3905 break;
3906 case 0x68: /* push Iv */
3907 case 0x6a:
3908 if (CODE64(s)) {
3909 ot = dflag ? OT_QUAD : OT_WORD;
3910 } else {
3911 ot = dflag + OT_WORD;
3912 }
3913 if (b == 0x68)
3914 val = insn_get(s, ot);
3915 else
3916 val = (int8_t)insn_get(s, OT_BYTE);
3917 gen_op_movl_T0_im(val);
3918 gen_push_T0(s);
3919 break;
3920 case 0x8f: /* pop Ev */
3921 if (CODE64(s)) {
3922 ot = dflag ? OT_QUAD : OT_WORD;
3923 } else {
3924 ot = dflag + OT_WORD;
3925 }
3926 modrm = ldub_code(s->pc++);
3927 mod = (modrm >> 6) & 3;
3928 gen_pop_T0(s);
3929 if (mod == 3) {
3930 /* NOTE: order is important for pop %sp */
3931 gen_pop_update(s);
3932 rm = (modrm & 7) | REX_B(s);
3933 gen_op_mov_reg_T0[ot][rm]();
3934 } else {
3935 /* NOTE: order is important too for MMU exceptions */
3936 s->popl_esp_hack = 1 << ot;
3937 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3938 s->popl_esp_hack = 0;
3939 gen_pop_update(s);
3940 }
3941 break;
3942 case 0xc8: /* enter */
3943 {
3944 int level;
3945 val = lduw_code(s->pc);
3946 s->pc += 2;
3947 level = ldub_code(s->pc++);
3948 gen_enter(s, val, level);
3949 }
3950 break;
3951 case 0xc9: /* leave */
3952 /* XXX: exception not precise (ESP is updated before potential exception) */
3953 if (CODE64(s)) {
3954 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3955 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3956 } else if (s->ss32) {
3957 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3958 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3959 } else {
3960 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3961 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3962 }
3963 gen_pop_T0(s);
3964 if (CODE64(s)) {
3965 ot = dflag ? OT_QUAD : OT_WORD;
3966 } else {
3967 ot = dflag + OT_WORD;
3968 }
3969 gen_op_mov_reg_T0[ot][R_EBP]();
3970 gen_pop_update(s);
3971 break;
3972 case 0x06: /* push es */
3973 case 0x0e: /* push cs */
3974 case 0x16: /* push ss */
3975 case 0x1e: /* push ds */
3976 if (CODE64(s))
3977 goto illegal_op;
3978 gen_op_movl_T0_seg(b >> 3);
3979 gen_push_T0(s);
3980 break;
3981 case 0x1a0: /* push fs */
3982 case 0x1a8: /* push gs */
3983 gen_op_movl_T0_seg((b >> 3) & 7);
3984 gen_push_T0(s);
3985 break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        /* the segment load can fault (GP/MMU), so it is emitted before
           the stack pointer update to keep exceptions restartable */
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            /* the segment load forced a block exit (e.g. in protected
               mode); flush EIP and terminate the TB */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
4008 case 0x1a1: /* pop fs */
4009 case 0x1a9: /* pop gs */
4010 gen_pop_T0(s);
4011 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4012 gen_pop_update(s);
4013 if (s->is_jmp) {
4014 gen_jmp_im(s->pc - s->cs_base);
4015 gen_eob(s);
4016 }
4017 break;
4018
4019 /**************************/
4020 /* mov */
4021 case 0x88:
4022 case 0x89: /* mov Gv, Ev */
4023 if ((b & 1) == 0)
4024 ot = OT_BYTE;
4025 else
4026 ot = dflag + OT_WORD;
4027 modrm = ldub_code(s->pc++);
4028 reg = ((modrm >> 3) & 7) | rex_r;
4029
4030 /* generate a generic store */
4031 gen_ldst_modrm(s, modrm, ot, reg, 1);
4032 break;
4033 case 0xc6:
4034 case 0xc7: /* mov Ev, Iv */
4035 if ((b & 1) == 0)
4036 ot = OT_BYTE;
4037 else
4038 ot = dflag + OT_WORD;
4039 modrm = ldub_code(s->pc++);
4040 mod = (modrm >> 6) & 3;
4041 if (mod != 3) {
4042 s->rip_offset = insn_const_size(ot);
4043 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4044 }
4045 val = insn_get(s, ot);
4046 gen_op_movl_T0_im(val);
4047 if (mod != 3)
4048 gen_op_st_T0_A0[ot + s->mem_index]();
4049 else
4050 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
4051 break;
4052 case 0x8a:
4053 case 0x8b: /* mov Ev, Gv */
4054 if ((b & 1) == 0)
4055 ot = OT_BYTE;
4056 else
4057 ot = OT_WORD + dflag;
4058 modrm = ldub_code(s->pc++);
4059 reg = ((modrm >> 3) & 7) | rex_r;
4060
4061 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4062 gen_op_mov_reg_T0[ot][reg]();
4063 break;
4064 case 0x8e: /* mov seg, Gv */
4065 modrm = ldub_code(s->pc++);
4066 reg = (modrm >> 3) & 7;
4067 if (reg >= 6 || reg == R_CS)
4068 goto illegal_op;
4069 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4070 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4071 if (reg == R_SS) {
4072 /* if reg == SS, inhibit interrupts/trace */
4073 /* If several instructions disable interrupts, only the
4074 _first_ does it */
4075 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4076 gen_op_set_inhibit_irq();
4077 s->tf = 0;
4078 }
4079 if (s->is_jmp) {
4080 gen_jmp_im(s->pc - s->cs_base);
4081 gen_eob(s);
4082 }
4083 break;
4084 case 0x8c: /* mov Gv, seg */
4085 modrm = ldub_code(s->pc++);
4086 reg = (modrm >> 3) & 7;
4087 mod = (modrm >> 6) & 3;
4088 if (reg >= 6)
4089 goto illegal_op;
4090 gen_op_movl_T0_seg(reg);
4091 if (mod == 3)
4092 ot = OT_WORD + dflag;
4093 else
4094 ot = OT_WORD;
4095 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4096 break;
4097
4098 case 0x1b6: /* movzbS Gv, Eb */
4099 case 0x1b7: /* movzwS Gv, Eb */
4100 case 0x1be: /* movsbS Gv, Eb */
4101 case 0x1bf: /* movswS Gv, Eb */
4102 {
4103 int d_ot;
4104 /* d_ot is the size of destination */
4105 d_ot = dflag + OT_WORD;
4106 /* ot is the size of source */
4107 ot = (b & 1) + OT_BYTE;
4108 modrm = ldub_code(s->pc++);
4109 reg = ((modrm >> 3) & 7) | rex_r;
4110 mod = (modrm >> 6) & 3;
4111 rm = (modrm & 7) | REX_B(s);
4112
4113 if (mod == 3) {
4114 gen_op_mov_TN_reg[ot][0][rm]();
4115 switch(ot | (b & 8)) {
4116 case OT_BYTE:
4117 gen_op_movzbl_T0_T0();
4118 break;
4119 case OT_BYTE | 8:
4120 gen_op_movsbl_T0_T0();
4121 break;
4122 case OT_WORD:
4123 gen_op_movzwl_T0_T0();
4124 break;
4125 default:
4126 case OT_WORD | 8:
4127 gen_op_movswl_T0_T0();
4128 break;
4129 }
4130 gen_op_mov_reg_T0[d_ot][reg]();
4131 } else {
4132 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4133 if (b & 8) {
4134 gen_op_lds_T0_A0[ot + s->mem_index]();
4135 } else {
4136 gen_op_ldu_T0_A0[ot + s->mem_index]();
4137 }
4138 gen_op_mov_reg_T0[d_ot][reg]();
4139 }
4140 }
4141 break;
4142
4143 case 0x8d: /* lea */
4144 ot = dflag + OT_WORD;
4145 modrm = ldub_code(s->pc++);
4146 mod = (modrm >> 6) & 3;
4147 if (mod == 3)
4148 goto illegal_op;
4149 reg = ((modrm >> 3) & 7) | rex_r;
4150 /* we must ensure that no segment is added */
4151 s->override = -1;
4152 val = s->addseg;
4153 s->addseg = 0;
4154 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4155 s->addseg = val;
4156 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4157 break;
4158
4159 case 0xa0: /* mov EAX, Ov */
4160 case 0xa1:
4161 case 0xa2: /* mov Ov, EAX */
4162 case 0xa3:
4163 {
4164 target_ulong offset_addr;
4165
4166 if ((b & 1) == 0)
4167 ot = OT_BYTE;
4168 else
4169 ot = dflag + OT_WORD;
4170#ifdef TARGET_X86_64
4171 if (s->aflag == 2) {
4172 offset_addr = ldq_code(s->pc);
4173 s->pc += 8;
4174 if (offset_addr == (int32_t)offset_addr)
4175 gen_op_movq_A0_im(offset_addr);
4176 else
4177 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4178 } else
4179#endif
4180 {
4181 if (s->aflag) {
4182 offset_addr = insn_get(s, OT_LONG);
4183 } else {
4184 offset_addr = insn_get(s, OT_WORD);
4185 }
4186 gen_op_movl_A0_im(offset_addr);
4187 }
4188 gen_add_A0_ds_seg(s);
4189 if ((b & 2) == 0) {
4190 gen_op_ld_T0_A0[ot + s->mem_index]();
4191 gen_op_mov_reg_T0[ot][R_EAX]();
4192 } else {
4193 gen_op_mov_TN_reg[ot][0][R_EAX]();
4194 gen_op_st_T0_A0[ot + s->mem_index]();
4195 }
4196 }
4197 break;
4198 case 0xd7: /* xlat */
4199#ifdef TARGET_X86_64
4200 if (s->aflag == 2) {
4201 gen_op_movq_A0_reg[R_EBX]();
4202 gen_op_addq_A0_AL();
4203 } else
4204#endif
4205 {
4206 gen_op_movl_A0_reg[R_EBX]();
4207 gen_op_addl_A0_AL();
4208 if (s->aflag == 0)
4209 gen_op_andl_A0_ffff();
4210 }
4211 gen_add_A0_ds_seg(s);
4212 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4213 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4214 break;
4215 case 0xb0 ... 0xb7: /* mov R, Ib */
4216 val = insn_get(s, OT_BYTE);
4217 gen_op_movl_T0_im(val);
4218 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4219 break;
4220 case 0xb8 ... 0xbf: /* mov R, Iv */
4221#ifdef TARGET_X86_64
4222 if (dflag == 2) {
4223 uint64_t tmp;
4224 /* 64 bit case */
4225 tmp = ldq_code(s->pc);
4226 s->pc += 8;
4227 reg = (b & 7) | REX_B(s);
4228 gen_movtl_T0_im(tmp);
4229 gen_op_mov_reg_T0[OT_QUAD][reg]();
4230 } else
4231#endif
4232 {
4233 ot = dflag ? OT_LONG : OT_WORD;
4234 val = insn_get(s, ot);
4235 reg = (b & 7) | REX_B(s);
4236 gen_op_movl_T0_im(val);
4237 gen_op_mov_reg_T0[ot][reg]();
4238 }
4239 break;
4240
4241 case 0x91 ... 0x97: /* xchg R, EAX */
4242 ot = dflag + OT_WORD;
4243 reg = (b & 7) | REX_B(s);
4244 rm = R_EAX;
4245 goto do_xchg_reg;
4246 case 0x86:
4247 case 0x87: /* xchg Ev, Gv */
4248 if ((b & 1) == 0)
4249 ot = OT_BYTE;
4250 else
4251 ot = dflag + OT_WORD;
4252 modrm = ldub_code(s->pc++);
4253 reg = ((modrm >> 3) & 7) | rex_r;
4254 mod = (modrm >> 6) & 3;
4255 if (mod == 3) {
4256 rm = (modrm & 7) | REX_B(s);
4257 do_xchg_reg:
4258 gen_op_mov_TN_reg[ot][0][reg]();
4259 gen_op_mov_TN_reg[ot][1][rm]();
4260 gen_op_mov_reg_T0[ot][rm]();
4261 gen_op_mov_reg_T1[ot][reg]();
4262 } else {
4263 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4264 gen_op_mov_TN_reg[ot][0][reg]();
4265 /* for xchg, lock is implicit */
4266 if (!(prefixes & PREFIX_LOCK))
4267 gen_op_lock();
4268 gen_op_ld_T1_A0[ot + s->mem_index]();
4269 gen_op_st_T0_A0[ot + s->mem_index]();
4270 if (!(prefixes & PREFIX_LOCK))
4271 gen_op_unlock();
4272 gen_op_mov_reg_T1[ot][reg]();
4273 }
4274 break;
4275 case 0xc4: /* les Gv */
4276 if (CODE64(s))
4277 goto illegal_op;
4278 op = R_ES;
4279 goto do_lxx;
4280 case 0xc5: /* lds Gv */
4281 if (CODE64(s))
4282 goto illegal_op;
4283 op = R_DS;
4284 goto do_lxx;
4285 case 0x1b2: /* lss Gv */
4286 op = R_SS;
4287 goto do_lxx;
4288 case 0x1b4: /* lfs Gv */
4289 op = R_FS;
4290 goto do_lxx;
4291 case 0x1b5: /* lgs Gv */
4292 op = R_GS;
4293 do_lxx:
4294 ot = dflag ? OT_LONG : OT_WORD;
4295 modrm = ldub_code(s->pc++);
4296 reg = ((modrm >> 3) & 7) | rex_r;
4297 mod = (modrm >> 6) & 3;
4298 if (mod == 3)
4299 goto illegal_op;
4300 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4301 gen_op_ld_T1_A0[ot + s->mem_index]();
4302 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4303 /* load the segment first to handle exceptions properly */
4304 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4305 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4306 /* then put the data */
4307 gen_op_mov_reg_T1[ot][reg]();
4308 if (s->is_jmp) {
4309 gen_jmp_im(s->pc - s->cs_base);
4310 gen_eob(s);
4311 }
4312 break;
4313
4314 /************************/
4315 /* shifts */
4316 case 0xc0:
4317 case 0xc1:
4318 /* shift Ev,Ib */
4319 shift = 2;
4320 grp2:
4321 {
4322 if ((b & 1) == 0)
4323 ot = OT_BYTE;
4324 else
4325 ot = dflag + OT_WORD;
4326
4327 modrm = ldub_code(s->pc++);
4328 mod = (modrm >> 6) & 3;
4329 op = (modrm >> 3) & 7;
4330
4331 if (mod != 3) {
4332 if (shift == 2) {
4333 s->rip_offset = 1;
4334 }
4335 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4336 opreg = OR_TMP0;
4337 } else {
4338 opreg = (modrm & 7) | REX_B(s);
4339 }
4340
4341 /* simpler op */
4342 if (shift == 0) {
4343 gen_shift(s, op, ot, opreg, OR_ECX);
4344 } else {
4345 if (shift == 2) {
4346 shift = ldub_code(s->pc++);
4347 }
4348 gen_shifti(s, op, ot, opreg, shift);
4349 }
4350 }
4351 break;
4352 case 0xd0:
4353 case 0xd1:
4354 /* shift Ev,1 */
4355 shift = 1;
4356 goto grp2;
4357 case 0xd2:
4358 case 0xd3:
4359 /* shift Ev,cl */
4360 shift = 0;
4361 goto grp2;
4362
4363 case 0x1a4: /* shld imm */
4364 op = 0;
4365 shift = 1;
4366 goto do_shiftd;
4367 case 0x1a5: /* shld cl */
4368 op = 0;
4369 shift = 0;
4370 goto do_shiftd;
4371 case 0x1ac: /* shrd imm */
4372 op = 1;
4373 shift = 1;
4374 goto do_shiftd;
4375 case 0x1ad: /* shrd cl */
4376 op = 1;
4377 shift = 0;
4378 do_shiftd:
4379 ot = dflag + OT_WORD;
4380 modrm = ldub_code(s->pc++);
4381 mod = (modrm >> 6) & 3;
4382 rm = (modrm & 7) | REX_B(s);
4383 reg = ((modrm >> 3) & 7) | rex_r;
4384
4385 if (mod != 3) {
4386 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4387 gen_op_ld_T0_A0[ot + s->mem_index]();
4388 } else {
4389 gen_op_mov_TN_reg[ot][0][rm]();
4390 }
4391 gen_op_mov_TN_reg[ot][1][reg]();
4392
4393 if (shift) {
4394 val = ldub_code(s->pc++);
4395 if (ot == OT_QUAD)
4396 val &= 0x3f;
4397 else
4398 val &= 0x1f;
4399 if (val) {
4400 if (mod == 3)
4401 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4402 else
4403 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4404 if (op == 0 && ot != OT_WORD)
4405 s->cc_op = CC_OP_SHLB + ot;
4406 else
4407 s->cc_op = CC_OP_SARB + ot;
4408 }
4409 } else {
4410 if (s->cc_op != CC_OP_DYNAMIC)
4411 gen_op_set_cc_op(s->cc_op);
4412 if (mod == 3)
4413 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4414 else
4415 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4416 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4417 }
4418 if (mod == 3) {
4419 gen_op_mov_reg_T0[ot][rm]();
4420 }
4421 break;
4422
4423 /************************/
4424 /* floats */
4425 case 0xd8 ... 0xdf:
4426 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4427 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4428 /* XXX: what to do if illegal op ? */
4429 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4430 break;
4431 }
4432 modrm = ldub_code(s->pc++);
4433 mod = (modrm >> 6) & 3;
4434 rm = modrm & 7;
4435 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4436 if (mod != 3) {
4437 /* memory op */
4438 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4439 switch(op) {
4440 case 0x00 ... 0x07: /* fxxxs */
4441 case 0x10 ... 0x17: /* fixxxl */
4442 case 0x20 ... 0x27: /* fxxxl */
4443 case 0x30 ... 0x37: /* fixxx */
4444 {
4445 int op1;
4446 op1 = op & 7;
4447
4448 switch(op >> 4) {
4449 case 0:
4450 gen_op_flds_FT0_A0();
4451 break;
4452 case 1:
4453 gen_op_fildl_FT0_A0();
4454 break;
4455 case 2:
4456 gen_op_fldl_FT0_A0();
4457 break;
4458 case 3:
4459 default:
4460 gen_op_fild_FT0_A0();
4461 break;
4462 }
4463
4464 gen_op_fp_arith_ST0_FT0[op1]();
4465 if (op1 == 3) {
4466 /* fcomp needs pop */
4467 gen_op_fpop();
4468 }
4469 }
4470 break;
4471 case 0x08: /* flds */
4472 case 0x0a: /* fsts */
4473 case 0x0b: /* fstps */
4474 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4475 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4476 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4477 switch(op & 7) {
4478 case 0:
4479 switch(op >> 4) {
4480 case 0:
4481 gen_op_flds_ST0_A0();
4482 break;
4483 case 1:
4484 gen_op_fildl_ST0_A0();
4485 break;
4486 case 2:
4487 gen_op_fldl_ST0_A0();
4488 break;
4489 case 3:
4490 default:
4491 gen_op_fild_ST0_A0();
4492 break;
4493 }
4494 break;
4495 case 1:
4496 switch(op >> 4) {
4497 case 1:
4498 gen_op_fisttl_ST0_A0();
4499 break;
4500 case 2:
4501 gen_op_fisttll_ST0_A0();
4502 break;
4503 case 3:
4504 default:
4505 gen_op_fistt_ST0_A0();
4506 }
4507 gen_op_fpop();
4508 break;
4509 default:
4510 switch(op >> 4) {
4511 case 0:
4512 gen_op_fsts_ST0_A0();
4513 break;
4514 case 1:
4515 gen_op_fistl_ST0_A0();
4516 break;
4517 case 2:
4518 gen_op_fstl_ST0_A0();
4519 break;
4520 case 3:
4521 default:
4522 gen_op_fist_ST0_A0();
4523 break;
4524 }
4525 if ((op & 7) == 3)
4526 gen_op_fpop();
4527 break;
4528 }
4529 break;
4530 case 0x0c: /* fldenv mem */
4531 gen_op_fldenv_A0(s->dflag);
4532 break;
4533 case 0x0d: /* fldcw mem */
4534 gen_op_fldcw_A0();
4535 break;
4536 case 0x0e: /* fnstenv mem */
4537 gen_op_fnstenv_A0(s->dflag);
4538 break;
4539 case 0x0f: /* fnstcw mem */
4540 gen_op_fnstcw_A0();
4541 break;
4542 case 0x1d: /* fldt mem */
4543 gen_op_fldt_ST0_A0();
4544 break;
4545 case 0x1f: /* fstpt mem */
4546 gen_op_fstt_ST0_A0();
4547 gen_op_fpop();
4548 break;
4549 case 0x2c: /* frstor mem */
4550 gen_op_frstor_A0(s->dflag);
4551 break;
4552 case 0x2e: /* fnsave mem */
4553 gen_op_fnsave_A0(s->dflag);
4554 break;
4555 case 0x2f: /* fnstsw mem */
4556 gen_op_fnstsw_A0();
4557 break;
4558 case 0x3c: /* fbld */
4559 gen_op_fbld_ST0_A0();
4560 break;
4561 case 0x3e: /* fbstp */
4562 gen_op_fbst_ST0_A0();
4563 gen_op_fpop();
4564 break;
4565 case 0x3d: /* fildll */
4566 gen_op_fildll_ST0_A0();
4567 break;
4568 case 0x3f: /* fistpll */
4569 gen_op_fistll_ST0_A0();
4570 gen_op_fpop();
4571 break;
4572 default:
4573 goto illegal_op;
4574 }
4575 } else {
4576 /* register float ops */
4577 opreg = rm;
4578
4579 switch(op) {
4580 case 0x08: /* fld sti */
4581 gen_op_fpush();
4582 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4583 break;
4584 case 0x09: /* fxchg sti */
4585 case 0x29: /* fxchg4 sti, undocumented op */
4586 case 0x39: /* fxchg7 sti, undocumented op */
4587 gen_op_fxchg_ST0_STN(opreg);
4588 break;
4589 case 0x0a: /* grp d9/2 */
4590 switch(rm) {
4591 case 0: /* fnop */
4592 /* check exceptions (FreeBSD FPU probe) */
4593 if (s->cc_op != CC_OP_DYNAMIC)
4594 gen_op_set_cc_op(s->cc_op);
4595 gen_jmp_im(pc_start - s->cs_base);
4596 gen_op_fwait();
4597 break;
4598 default:
4599 goto illegal_op;
4600 }
4601 break;
4602 case 0x0c: /* grp d9/4 */
4603 switch(rm) {
4604 case 0: /* fchs */
4605 gen_op_fchs_ST0();
4606 break;
4607 case 1: /* fabs */
4608 gen_op_fabs_ST0();
4609 break;
4610 case 4: /* ftst */
4611 gen_op_fldz_FT0();
4612 gen_op_fcom_ST0_FT0();
4613 break;
4614 case 5: /* fxam */
4615 gen_op_fxam_ST0();
4616 break;
4617 default:
4618 goto illegal_op;
4619 }
4620 break;
4621 case 0x0d: /* grp d9/5 */
4622 {
4623 switch(rm) {
4624 case 0:
4625 gen_op_fpush();
4626 gen_op_fld1_ST0();
4627 break;
4628 case 1:
4629 gen_op_fpush();
4630 gen_op_fldl2t_ST0();
4631 break;
4632 case 2:
4633 gen_op_fpush();
4634 gen_op_fldl2e_ST0();
4635 break;
4636 case 3:
4637 gen_op_fpush();
4638 gen_op_fldpi_ST0();
4639 break;
4640 case 4:
4641 gen_op_fpush();
4642 gen_op_fldlg2_ST0();
4643 break;
4644 case 5:
4645 gen_op_fpush();
4646 gen_op_fldln2_ST0();
4647 break;
4648 case 6:
4649 gen_op_fpush();
4650 gen_op_fldz_ST0();
4651 break;
4652 default:
4653 goto illegal_op;
4654 }
4655 }
4656 break;
4657 case 0x0e: /* grp d9/6 */
4658 switch(rm) {
4659 case 0: /* f2xm1 */
4660 gen_op_f2xm1();
4661 break;
4662 case 1: /* fyl2x */
4663 gen_op_fyl2x();
4664 break;
4665 case 2: /* fptan */
4666 gen_op_fptan();
4667 break;
4668 case 3: /* fpatan */
4669 gen_op_fpatan();
4670 break;
4671 case 4: /* fxtract */
4672 gen_op_fxtract();
4673 break;
4674 case 5: /* fprem1 */
4675 gen_op_fprem1();
4676 break;
4677 case 6: /* fdecstp */
4678 gen_op_fdecstp();
4679 break;
4680 default:
4681 case 7: /* fincstp */
4682 gen_op_fincstp();
4683 break;
4684 }
4685 break;
4686 case 0x0f: /* grp d9/7 */
4687 switch(rm) {
4688 case 0: /* fprem */
4689 gen_op_fprem();
4690 break;
4691 case 1: /* fyl2xp1 */
4692 gen_op_fyl2xp1();
4693 break;
4694 case 2: /* fsqrt */
4695 gen_op_fsqrt();
4696 break;
4697 case 3: /* fsincos */
4698 gen_op_fsincos();
4699 break;
4700 case 5: /* fscale */
4701 gen_op_fscale();
4702 break;
4703 case 4: /* frndint */
4704 gen_op_frndint();
4705 break;
4706 case 6: /* fsin */
4707 gen_op_fsin();
4708 break;
4709 default:
4710 case 7: /* fcos */
4711 gen_op_fcos();
4712 break;
4713 }
4714 break;
4715 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4716 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4717 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4718 {
4719 int op1;
4720
4721 op1 = op & 7;
4722 if (op >= 0x20) {
4723 gen_op_fp_arith_STN_ST0[op1](opreg);
4724 if (op >= 0x30)
4725 gen_op_fpop();
4726 } else {
4727 gen_op_fmov_FT0_STN(opreg);
4728 gen_op_fp_arith_ST0_FT0[op1]();
4729 }
4730 }
4731 break;
4732 case 0x02: /* fcom */
4733 case 0x22: /* fcom2, undocumented op */
4734 gen_op_fmov_FT0_STN(opreg);
4735 gen_op_fcom_ST0_FT0();
4736 break;
4737 case 0x03: /* fcomp */
4738 case 0x23: /* fcomp3, undocumented op */
4739 case 0x32: /* fcomp5, undocumented op */
4740 gen_op_fmov_FT0_STN(opreg);
4741 gen_op_fcom_ST0_FT0();
4742 gen_op_fpop();
4743 break;
4744 case 0x15: /* da/5 */
4745 switch(rm) {
4746 case 1: /* fucompp */
4747 gen_op_fmov_FT0_STN(1);
4748 gen_op_fucom_ST0_FT0();
4749 gen_op_fpop();
4750 gen_op_fpop();
4751 break;
4752 default:
4753 goto illegal_op;
4754 }
4755 break;
4756 case 0x1c:
4757 switch(rm) {
4758 case 0: /* feni (287 only, just do nop here) */
4759 break;
4760 case 1: /* fdisi (287 only, just do nop here) */
4761 break;
4762 case 2: /* fclex */
4763 gen_op_fclex();
4764 break;
4765 case 3: /* fninit */
4766 gen_op_fninit();
4767 break;
4768 case 4: /* fsetpm (287 only, just do nop here) */
4769 break;
4770 default:
4771 goto illegal_op;
4772 }
4773 break;
4774 case 0x1d: /* fucomi */
4775 if (s->cc_op != CC_OP_DYNAMIC)
4776 gen_op_set_cc_op(s->cc_op);
4777 gen_op_fmov_FT0_STN(opreg);
4778 gen_op_fucomi_ST0_FT0();
4779 s->cc_op = CC_OP_EFLAGS;
4780 break;
4781 case 0x1e: /* fcomi */
4782 if (s->cc_op != CC_OP_DYNAMIC)
4783 gen_op_set_cc_op(s->cc_op);
4784 gen_op_fmov_FT0_STN(opreg);
4785 gen_op_fcomi_ST0_FT0();
4786 s->cc_op = CC_OP_EFLAGS;
4787 break;
4788 case 0x28: /* ffree sti */
4789 gen_op_ffree_STN(opreg);
4790 break;
4791 case 0x2a: /* fst sti */
4792 gen_op_fmov_STN_ST0(opreg);
4793 break;
4794 case 0x2b: /* fstp sti */
4795 case 0x0b: /* fstp1 sti, undocumented op */
4796 case 0x3a: /* fstp8 sti, undocumented op */
4797 case 0x3b: /* fstp9 sti, undocumented op */
4798 gen_op_fmov_STN_ST0(opreg);
4799 gen_op_fpop();
4800 break;
4801 case 0x2c: /* fucom st(i) */
4802 gen_op_fmov_FT0_STN(opreg);
4803 gen_op_fucom_ST0_FT0();
4804 break;
4805 case 0x2d: /* fucomp st(i) */
4806 gen_op_fmov_FT0_STN(opreg);
4807 gen_op_fucom_ST0_FT0();
4808 gen_op_fpop();
4809 break;
4810 case 0x33: /* de/3 */
4811 switch(rm) {
4812 case 1: /* fcompp */
4813 gen_op_fmov_FT0_STN(1);
4814 gen_op_fcom_ST0_FT0();
4815 gen_op_fpop();
4816 gen_op_fpop();
4817 break;
4818 default:
4819 goto illegal_op;
4820 }
4821 break;
4822 case 0x38: /* ffreep sti, undocumented op */
4823 gen_op_ffree_STN(opreg);
4824 gen_op_fpop();
4825 break;
4826 case 0x3c: /* df/4 */
4827 switch(rm) {
4828 case 0:
4829 gen_op_fnstsw_EAX();
4830 break;
4831 default:
4832 goto illegal_op;
4833 }
4834 break;
4835 case 0x3d: /* fucomip */
4836 if (s->cc_op != CC_OP_DYNAMIC)
4837 gen_op_set_cc_op(s->cc_op);
4838 gen_op_fmov_FT0_STN(opreg);
4839 gen_op_fucomi_ST0_FT0();
4840 gen_op_fpop();
4841 s->cc_op = CC_OP_EFLAGS;
4842 break;
4843 case 0x3e: /* fcomip */
4844 if (s->cc_op != CC_OP_DYNAMIC)
4845 gen_op_set_cc_op(s->cc_op);
4846 gen_op_fmov_FT0_STN(opreg);
4847 gen_op_fcomi_ST0_FT0();
4848 gen_op_fpop();
4849 s->cc_op = CC_OP_EFLAGS;
4850 break;
4851 case 0x10 ... 0x13: /* fcmovxx */
4852 case 0x18 ... 0x1b:
4853 {
4854 int op1;
4855 const static uint8_t fcmov_cc[8] = {
4856 (JCC_B << 1),
4857 (JCC_Z << 1),
4858 (JCC_BE << 1),
4859 (JCC_P << 1),
4860 };
4861 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4862 gen_setcc(s, op1);
4863 gen_op_fcmov_ST0_STN_T0(opreg);
4864 }
4865 break;
4866 default:
4867 goto illegal_op;
4868 }
4869 }
4870#ifdef USE_CODE_COPY
4871 s->tb->cflags |= CF_TB_FP_USED;
4872#endif
4873 break;
4874 /************************/
4875 /* string ops */
4876
4877 case 0xa4: /* movsS */
4878 case 0xa5:
4879 if ((b & 1) == 0)
4880 ot = OT_BYTE;
4881 else
4882 ot = dflag + OT_WORD;
4883
4884 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4885 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4886 } else {
4887 gen_movs(s, ot);
4888 }
4889 break;
4890
4891 case 0xaa: /* stosS */
4892 case 0xab:
4893 if ((b & 1) == 0)
4894 ot = OT_BYTE;
4895 else
4896 ot = dflag + OT_WORD;
4897
4898 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4899 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4900 } else {
4901 gen_stos(s, ot);
4902 }
4903 break;
4904 case 0xac: /* lodsS */
4905 case 0xad:
4906 if ((b & 1) == 0)
4907 ot = OT_BYTE;
4908 else
4909 ot = dflag + OT_WORD;
4910 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4911 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4912 } else {
4913 gen_lods(s, ot);
4914 }
4915 break;
4916 case 0xae: /* scasS */
4917 case 0xaf:
4918 if ((b & 1) == 0)
4919 ot = OT_BYTE;
4920 else
4921 ot = dflag + OT_WORD;
4922 if (prefixes & PREFIX_REPNZ) {
4923 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4924 } else if (prefixes & PREFIX_REPZ) {
4925 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4926 } else {
4927 gen_scas(s, ot);
4928 s->cc_op = CC_OP_SUBB + ot;
4929 }
4930 break;
4931
4932 case 0xa6: /* cmpsS */
4933 case 0xa7:
4934 if ((b & 1) == 0)
4935 ot = OT_BYTE;
4936 else
4937 ot = dflag + OT_WORD;
4938 if (prefixes & PREFIX_REPNZ) {
4939 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4940 } else if (prefixes & PREFIX_REPZ) {
4941 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4942 } else {
4943 gen_cmps(s, ot);
4944 s->cc_op = CC_OP_SUBB + ot;
4945 }
4946 break;
4947 case 0x6c: /* insS */
4948 case 0x6d:
4949 if ((b & 1) == 0)
4950 ot = OT_BYTE;
4951 else
4952 ot = dflag ? OT_LONG : OT_WORD;
4953 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4954 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4955 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4956 } else {
4957 gen_ins(s, ot);
4958 }
4959 break;
4960 case 0x6e: /* outsS */
4961 case 0x6f:
4962 if ((b & 1) == 0)
4963 ot = OT_BYTE;
4964 else
4965 ot = dflag ? OT_LONG : OT_WORD;
4966 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4967 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4968 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4969 } else {
4970 gen_outs(s, ot);
4971 }
4972 break;
4973
4974 /************************/
4975 /* port I/O */
4976 case 0xe4:
4977 case 0xe5:
4978 if ((b & 1) == 0)
4979 ot = OT_BYTE;
4980 else
4981 ot = dflag ? OT_LONG : OT_WORD;
4982 val = ldub_code(s->pc++);
4983 gen_op_movl_T0_im(val);
4984 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4985 gen_op_in[ot]();
4986 gen_op_mov_reg_T1[ot][R_EAX]();
4987 break;
    case 0xe6: /* out imm8, AL */
    case 0xe7: /* out imm8, eAX */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        /* I/O permission check is emitted even for the dropped case below */
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
        /* Port 0x80 writes are used by Linux purely as an I/O delay;
           skip emitting the actual out op entirely. */
        if (val == 0x80)
            break;
#endif /* VBOX */
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        gen_op_out[ot]();
        break;
5004 case 0xec:
5005 case 0xed:
5006 if ((b & 1) == 0)
5007 ot = OT_BYTE;
5008 else
5009 ot = dflag ? OT_LONG : OT_WORD;
5010 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5011 gen_op_andl_T0_ffff();
5012 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5013 gen_op_in[ot]();
5014 gen_op_mov_reg_T1[ot][R_EAX]();
5015 break;
5016 case 0xee:
5017 case 0xef:
5018 if ((b & 1) == 0)
5019 ot = OT_BYTE;
5020 else
5021 ot = dflag ? OT_LONG : OT_WORD;
5022 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5023 gen_op_andl_T0_ffff();
5024 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5025 gen_op_mov_TN_reg[ot][1][R_EAX]();
5026 gen_op_out[ot]();
5027 break;
5028
5029 /************************/
5030 /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        /* in 64-bit mode the near RET default operand size is 64 bits;
           only an operand-size prefix (dflag == 0) keeps it at 16 bits */
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        /* release the return address (2 << dflag bytes) plus the imm16
           extra stack bytes */
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
5043 case 0xc3: /* ret */
5044 gen_pop_T0(s);
5045 gen_pop_update(s);
5046 if (s->dflag == 0)
5047 gen_op_andl_T0_ffff();
5048 gen_op_jmp_T0();
5049 gen_eob(s);
5050 break;
5051 case 0xca: /* lret im */
5052 val = ldsw_code(s->pc);
5053 s->pc += 2;
5054 do_lret:
5055 if (s->pe && !s->vm86) {
        /* protected mode: the helper performs the privilege checks and
           may change CPL, so flags and EIP must be synced first */
5056 if (s->cc_op != CC_OP_DYNAMIC)
5057 gen_op_set_cc_op(s->cc_op);
5058 gen_jmp_im(pc_start - s->cs_base);
5059 gen_op_lret_protected(s->dflag, val);
5060 } else {
        /* real or vm86 mode: pop CS:IP inline, no protection checks */
5061 gen_stack_A0(s);
5062 /* pop offset */
5063 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5064 if (s->dflag == 0)
5065 gen_op_andl_T0_ffff();
5066 /* NOTE: keeping EIP updated is not a problem in case of
5067 exception */
5068 gen_op_jmp_T0();
5069 /* pop selector */
5070 gen_op_addl_A0_im(2 << s->dflag);
5071 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
        /* vm86/real-mode style CS load (helper name suggests a raw
           selector load without protection checks -- see
           gen_op_movl_seg_T0_vm) */
5072 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5073 /* add stack offset */
5074 gen_stack_update(s, val + (4 << s->dflag));
5075 }
5076 gen_eob(s);
5077 break;
5078 case 0xcb: /* lret */
        /* lret without immediate: same path with a zero stack release */
5079 val = 0;
5080 goto do_lret;
5081 case 0xcf: /* iret */
5082 if (!s->pe) {
5083 /* real mode */
5084 gen_op_iret_real(s->dflag);
5085 s->cc_op = CC_OP_EFLAGS;
5086 } else if (s->vm86) {
5087#ifdef VBOX
        /* VBOX/VME: with CR4.VME set a 16-bit-operand IRET is allowed
           in vm86 mode even when IOPL < 3; a 32-bit operand size
           (s->dflag != 0) still faults */
5088 if (s->iopl != 3 && (!s->vme || s->dflag)) {
5089#else
5090 if (s->iopl != 3) {
5091#endif
5092 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5093 } else {
5094 gen_op_iret_real(s->dflag);
5095 s->cc_op = CC_OP_EFLAGS;
5096 }
5097 } else {
        /* protected mode: the helper handles task returns and
           privilege-level changes; sync flags and EIP first */
5098 if (s->cc_op != CC_OP_DYNAMIC)
5099 gen_op_set_cc_op(s->cc_op);
5100 gen_jmp_im(pc_start - s->cs_base);
5101 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5102 s->cc_op = CC_OP_EFLAGS;
5103 }
5104 gen_eob(s);
5105 break;
5106 case 0xe8: /* call im */
5107 {
5108 if (dflag)
5109 tval = (int32_t)insn_get(s, OT_LONG);
5110 else
5111 tval = (int16_t)insn_get(s, OT_WORD);
5112 next_eip = s->pc - s->cs_base;
5113 tval += next_eip;
5114 if (s->dflag == 0)
5115 tval &= 0xffff;
5116 gen_movtl_T0_im(next_eip);
5117 gen_push_T0(s);
5118 gen_jmp(s, tval);
5119 }
5120 break;
5121 case 0x9a: /* lcall im */
5122 {
5123 unsigned int selector, offset;
5124
5125 if (CODE64(s))
5126 goto illegal_op;
5127 ot = dflag ? OT_LONG : OT_WORD;
5128 offset = insn_get(s, ot);
5129 selector = insn_get(s, OT_WORD);
5130
5131 gen_op_movl_T0_im(selector);
5132 gen_op_movl_T1_imu(offset);
5133 }
5134 goto do_lcall;
5135 case 0xe9: /* jmp im */
5136 if (dflag)
5137 tval = (int32_t)insn_get(s, OT_LONG);
5138 else
5139 tval = (int16_t)insn_get(s, OT_WORD);
5140 tval += s->pc - s->cs_base;
5141 if (s->dflag == 0)
5142 tval &= 0xffff;
5143 gen_jmp(s, tval);
5144 break;
5145 case 0xea: /* ljmp im */
5146 {
5147 unsigned int selector, offset;
5148
5149 if (CODE64(s))
5150 goto illegal_op;
5151 ot = dflag ? OT_LONG : OT_WORD;
5152 offset = insn_get(s, ot);
5153 selector = insn_get(s, OT_WORD);
5154
5155 gen_op_movl_T0_im(selector);
5156 gen_op_movl_T1_imu(offset);
5157 }
5158 goto do_ljmp;
5159 case 0xeb: /* jmp Jb */
5160 tval = (int8_t)insn_get(s, OT_BYTE);
5161 tval += s->pc - s->cs_base;
5162 if (s->dflag == 0)
5163 tval &= 0xffff;
5164 gen_jmp(s, tval);
5165 break;
5166 case 0x70 ... 0x7f: /* jcc Jb */
5167 tval = (int8_t)insn_get(s, OT_BYTE);
5168 goto do_jcc;
5169 case 0x180 ... 0x18f: /* jcc Jv */
5170 if (dflag) {
5171 tval = (int32_t)insn_get(s, OT_LONG);
5172 } else {
5173 tval = (int16_t)insn_get(s, OT_WORD);
5174 }
5175 do_jcc:
5176 next_eip = s->pc - s->cs_base;
5177 tval += next_eip;
5178 if (s->dflag == 0)
5179 tval &= 0xffff;
5180 gen_jcc(s, b, tval, next_eip);
5181 break;
5182
5183 case 0x190 ... 0x19f: /* setcc Gv */
5184 modrm = ldub_code(s->pc++);
5185 gen_setcc(s, b);
5186 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5187 break;
5188 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5189 ot = dflag + OT_WORD;
5190 modrm = ldub_code(s->pc++);
5191 reg = ((modrm >> 3) & 7) | rex_r;
5192 mod = (modrm >> 6) & 3;
5193 gen_setcc(s, b);
5194 if (mod != 3) {
5195 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5196 gen_op_ld_T1_A0[ot + s->mem_index]();
5197 } else {
5198 rm = (modrm & 7) | REX_B(s);
5199 gen_op_mov_TN_reg[ot][1][rm]();
5200 }
5201 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5202 break;
5203
5204 /************************/
5205 /* flags */
5206 case 0x9c: /* pushf */
5207#ifdef VBOX
        /* VBOX/VME: pushf is also allowed in vm86 with IOPL < 3 when
           CR4.VME is set and the operand size is 16 bits */
5208 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5209#else
5210 if (s->vm86 && s->iopl != 3) {
5211#endif
5212 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5213 } else {
5214 if (s->cc_op != CC_OP_DYNAMIC)
5215 gen_op_set_cc_op(s->cc_op);
5216#ifdef VBOX
        /* VME path builds the flags image differently (presumably VIF
           in place of IF -- see gen_op_movl_T0_eflags_vme) */
5217 if (s->vm86 && s->vme && s->iopl != 3)
5218 gen_op_movl_T0_eflags_vme();
5219 else
5220#endif
5221 gen_op_movl_T0_eflags();
5222 gen_push_T0(s);
5223 }
5224 break;
5225 case 0x9d: /* popf */
5226#ifdef VBOX
5227 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5228#else
5229 if (s->vm86 && s->iopl != 3) {
5230#endif
5231 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5232 } else {
5233 gen_pop_T0(s);
        /* which EFLAGS bits are writable depends on CPL vs IOPL; a
           dedicated helper exists per case and per operand size */
5234 if (s->cpl == 0) {
5235 if (s->dflag) {
5236 gen_op_movl_eflags_T0_cpl0();
5237 } else {
5238 gen_op_movw_eflags_T0_cpl0();
5239 }
5240 } else {
5241 if (s->cpl <= s->iopl) {
5242 if (s->dflag) {
5243 gen_op_movl_eflags_T0_io();
5244 } else {
5245 gen_op_movw_eflags_T0_io();
5246 }
5247 } else {
5248 if (s->dflag) {
5249 gen_op_movl_eflags_T0();
5250 } else {
5251#ifdef VBOX
        /* VME 16-bit popf variant for vm86 mode */
5252 if (s->vm86 && s->vme)
5253 gen_op_movw_eflags_T0_vme();
5254 else
5255#endif
5256 gen_op_movw_eflags_T0();
5257 }
5258 }
5259 }
5260 gen_pop_update(s);
5261 s->cc_op = CC_OP_EFLAGS;
5262 /* abort translation because TF flag may change */
5263 gen_jmp_im(s->pc - s->cs_base);
5264 gen_eob(s);
5265 }
5266 break;
5267 case 0x9e: /* sahf */
5268 if (CODE64(s))
5269 goto illegal_op;
5270 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5271 if (s->cc_op != CC_OP_DYNAMIC)
5272 gen_op_set_cc_op(s->cc_op);
5273 gen_op_movb_eflags_T0();
5274 s->cc_op = CC_OP_EFLAGS;
5275 break;
5276 case 0x9f: /* lahf */
5277 if (CODE64(s))
5278 goto illegal_op;
5279 if (s->cc_op != CC_OP_DYNAMIC)
5280 gen_op_set_cc_op(s->cc_op);
5281 gen_op_movl_T0_eflags();
5282 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5283 break;
5284 case 0xf5: /* cmc */
5285 if (s->cc_op != CC_OP_DYNAMIC)
5286 gen_op_set_cc_op(s->cc_op);
5287 gen_op_cmc();
5288 s->cc_op = CC_OP_EFLAGS;
5289 break;
5290 case 0xf8: /* clc */
5291 if (s->cc_op != CC_OP_DYNAMIC)
5292 gen_op_set_cc_op(s->cc_op);
5293 gen_op_clc();
5294 s->cc_op = CC_OP_EFLAGS;
5295 break;
5296 case 0xf9: /* stc */
5297 if (s->cc_op != CC_OP_DYNAMIC)
5298 gen_op_set_cc_op(s->cc_op);
5299 gen_op_stc();
5300 s->cc_op = CC_OP_EFLAGS;
5301 break;
5302 case 0xfc: /* cld */
5303 gen_op_cld();
5304 break;
5305 case 0xfd: /* std */
5306 gen_op_std();
5307 break;
5308
5309 /************************/
5310 /* bit operations */
5311 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5312 ot = dflag + OT_WORD;
5313 modrm = ldub_code(s->pc++);
5314 op = (modrm >> 3) & 7;
5315 mod = (modrm >> 6) & 3;
5316 rm = (modrm & 7) | REX_B(s);
5317 if (mod != 3) {
5318 s->rip_offset = 1;
5319 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5320 gen_op_ld_T0_A0[ot + s->mem_index]();
5321 } else {
5322 gen_op_mov_TN_reg[ot][0][rm]();
5323 }
5324 /* load shift */
5325 val = ldub_code(s->pc++);
5326 gen_op_movl_T1_im(val);
5327 if (op < 4)
5328 goto illegal_op;
5329 op -= 4;
5330 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5331 s->cc_op = CC_OP_SARB + ot;
5332 if (op != 0) {
5333 if (mod != 3)
5334 gen_op_st_T0_A0[ot + s->mem_index]();
5335 else
5336 gen_op_mov_reg_T0[ot][rm]();
5337 gen_op_update_bt_cc();
5338 }
5339 break;
5340 case 0x1a3: /* bt Gv, Ev */
5341 op = 0;
5342 goto do_btx;
5343 case 0x1ab: /* bts */
5344 op = 1;
5345 goto do_btx;
5346 case 0x1b3: /* btr */
5347 op = 2;
5348 goto do_btx;
5349 case 0x1bb: /* btc */
5350 op = 3;
5351 do_btx:
5352 ot = dflag + OT_WORD;
5353 modrm = ldub_code(s->pc++);
5354 reg = ((modrm >> 3) & 7) | rex_r;
5355 mod = (modrm >> 6) & 3;
5356 rm = (modrm & 7) | REX_B(s);
5357 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5358 if (mod != 3) {
5359 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5360 /* specific case: we need to add a displacement */
5361 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5362 gen_op_ld_T0_A0[ot + s->mem_index]();
5363 } else {
5364 gen_op_mov_TN_reg[ot][0][rm]();
5365 }
5366 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5367 s->cc_op = CC_OP_SARB + ot;
5368 if (op != 0) {
5369 if (mod != 3)
5370 gen_op_st_T0_A0[ot + s->mem_index]();
5371 else
5372 gen_op_mov_reg_T0[ot][rm]();
5373 gen_op_update_bt_cc();
5374 }
5375 break;
5376 case 0x1bc: /* bsf */
5377 case 0x1bd: /* bsr */
5378 ot = dflag + OT_WORD;
5379 modrm = ldub_code(s->pc++);
5380 reg = ((modrm >> 3) & 7) | rex_r;
5381 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5382 /* NOTE: in order to handle the 0 case, we must load the
5383 result. It could be optimized with a generated jump */
5384 gen_op_mov_TN_reg[ot][1][reg]();
5385 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5386 gen_op_mov_reg_T1[ot][reg]();
5387 s->cc_op = CC_OP_LOGICB + ot;
5388 break;
5389 /************************/
5390 /* bcd */
5391 case 0x27: /* daa */
5392 if (CODE64(s))
5393 goto illegal_op;
5394 if (s->cc_op != CC_OP_DYNAMIC)
5395 gen_op_set_cc_op(s->cc_op);
5396 gen_op_daa();
5397 s->cc_op = CC_OP_EFLAGS;
5398 break;
5399 case 0x2f: /* das */
5400 if (CODE64(s))
5401 goto illegal_op;
5402 if (s->cc_op != CC_OP_DYNAMIC)
5403 gen_op_set_cc_op(s->cc_op);
5404 gen_op_das();
5405 s->cc_op = CC_OP_EFLAGS;
5406 break;
5407 case 0x37: /* aaa */
5408 if (CODE64(s))
5409 goto illegal_op;
5410 if (s->cc_op != CC_OP_DYNAMIC)
5411 gen_op_set_cc_op(s->cc_op);
5412 gen_op_aaa();
5413 s->cc_op = CC_OP_EFLAGS;
5414 break;
5415 case 0x3f: /* aas */
5416 if (CODE64(s))
5417 goto illegal_op;
5418 if (s->cc_op != CC_OP_DYNAMIC)
5419 gen_op_set_cc_op(s->cc_op);
5420 gen_op_aas();
5421 s->cc_op = CC_OP_EFLAGS;
5422 break;
5423 case 0xd4: /* aam */
5424 if (CODE64(s))
5425 goto illegal_op;
5426 val = ldub_code(s->pc++);
        /* AAM divides AL by the immediate operand; an immediate of
           zero must raise #DE instead of dividing (this explicit check
           is the division-by-zero security fix noted in the changeset) */
5427 if (val == 0) {
5428 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5429 } else {
5430 gen_op_aam(val);
5431 s->cc_op = CC_OP_LOGICB;
5432 }
5433 break;
5434 case 0xd5: /* aad */
5435 if (CODE64(s))
5436 goto illegal_op;
        /* AAD takes an immediate base too (usually 10); no zero check
           needed since the helper multiplies rather than divides --
           see gen_op_aad */
5437 val = ldub_code(s->pc++);
5438 gen_op_aad(val);
5439 s->cc_op = CC_OP_LOGICB;
5440 break;
5441 /************************/
5442 /* misc */
5443 case 0x90: /* nop */
5444 /* XXX: xchg + rex handling */
5445 /* XXX: correct lock test for all insn */
5446 if (prefixes & PREFIX_LOCK)
5447 goto illegal_op;
5448 break;
5449 case 0x9b: /* fwait */
5450 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5451 (HF_MP_MASK | HF_TS_MASK)) {
5452 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5453 } else {
5454 if (s->cc_op != CC_OP_DYNAMIC)
5455 gen_op_set_cc_op(s->cc_op);
5456 gen_jmp_im(pc_start - s->cs_base);
5457 gen_op_fwait();
5458 }
5459 break;
5460 case 0xcc: /* int3 */
5461#ifdef VBOX
        /* VBOX: in vm86 mode with IOPL < 3, fault unless CR4.VME is set.
           NOTE(review): per the SDM, int3 (0xCC) is not IOPL-sensitive
           in vm86 mode (only int N is) -- confirm this check is an
           intentional VME-redirection choice */
5462 if (s->vm86 && s->iopl != 3 && !s->vme) {
5463 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5464 } else
5465#endif
5466 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5467 break;
5468 case 0xcd: /* int N */
5469 val = ldub_code(s->pc++);
5470#ifdef VBOX
        /* VBOX relaxes the vm86 IOPL check when CR4.VME is set
           (presumably redirected via the interrupt bitmap -- see
           gen_interrupt) */
5471 if (s->vm86 && s->iopl != 3 && !s->vme) {
5472#else
5473 if (s->vm86 && s->iopl != 3) {
5474#endif
5475 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5476 } else {
5477 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5478 }
5479 break;
5480 case 0xce: /* into */
5481 if (CODE64(s))
5482 goto illegal_op;
5483 if (s->cc_op != CC_OP_DYNAMIC)
5484 gen_op_set_cc_op(s->cc_op);
5485 gen_jmp_im(pc_start - s->cs_base);
5486 gen_op_into(s->pc - pc_start);
5487 break;
5488 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5489#if 1
5490 gen_debug(s, pc_start - s->cs_base);
5491#else
5492 /* start debug */
5493 tb_flush(cpu_single_env);
5494 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5495#endif
5496 break;
5497 case 0xfa: /* cli */
5498 if (!s->vm86) {
        /* privileged enough (CPL <= IOPL): clear IF directly */
5499 if (s->cpl <= s->iopl) {
5500 gen_op_cli();
5501 } else {
5502 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5503 }
5504 } else {
5505 if (s->iopl == 3) {
5506 gen_op_cli();
5507#ifdef VBOX
        /* VME variant (presumably clears VIF rather than IF --
           see gen_op_cli_vme) */
5508 } else if (s->iopl != 3 && s->vme) {
5509 gen_op_cli_vme();
5510#endif
5511 } else {
5512 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5513 }
5514 }
5515 break;
5516 case 0xfb: /* sti */
5517 if (!s->vm86) {
5518 if (s->cpl <= s->iopl) {
5519 gen_sti:
5520 gen_op_sti();
5521 /* interruptions are enabled only the first insn after sti */
5522 /* If several instructions disable interrupts, only the
5523 _first_ does it */
5524 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5525 gen_op_set_inhibit_irq();
5526 /* give a chance to handle pending irqs */
5527 gen_jmp_im(s->pc - s->cs_base);
5528 gen_eob(s);
5529 } else {
5530 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5531 }
5532 } else {
5533 if (s->iopl == 3) {
5534 goto gen_sti;
5535#ifdef VBOX
        /* VME variant; ends the TB as well so pending irqs get seen */
5536 } else if (s->iopl != 3 && s->vme) {
5537 gen_op_sti_vme();
5538 /* give a chance to handle pending irqs */
5539 gen_jmp_im(s->pc - s->cs_base);
5540 gen_eob(s);
5541#endif
5542 } else {
5543 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5544 }
5545 }
5546 break;
5547 case 0x62: /* bound */
5548 if (CODE64(s))
5549 goto illegal_op;
5550 ot = dflag ? OT_LONG : OT_WORD;
5551 modrm = ldub_code(s->pc++);
5552 reg = (modrm >> 3) & 7;
5553 mod = (modrm >> 6) & 3;
5554 if (mod == 3)
5555 goto illegal_op;
5556 gen_op_mov_TN_reg[ot][0][reg]();
5557 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5558 gen_jmp_im(pc_start - s->cs_base);
5559 if (ot == OT_WORD)
5560 gen_op_boundw();
5561 else
5562 gen_op_boundl();
5563 break;
5564 case 0x1c8 ... 0x1cf: /* bswap reg */
5565 reg = (b & 7) | REX_B(s);
5566#ifdef TARGET_X86_64
5567 if (dflag == 2) {
5568 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5569 gen_op_bswapq_T0();
5570 gen_op_mov_reg_T0[OT_QUAD][reg]();
5571 } else
5572#endif
5573 {
5574 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5575 gen_op_bswapl_T0();
5576 gen_op_mov_reg_T0[OT_LONG][reg]();
5577 }
5578 break;
5579 case 0xd6: /* salc */
5580 if (CODE64(s))
5581 goto illegal_op;
5582 if (s->cc_op != CC_OP_DYNAMIC)
5583 gen_op_set_cc_op(s->cc_op);
5584 gen_op_salc();
5585 break;
5586 case 0xe0: /* loopnz */
5587 case 0xe1: /* loopz */
5588 if (s->cc_op != CC_OP_DYNAMIC)
5589 gen_op_set_cc_op(s->cc_op);
5590 /* FALL THRU */
5591 case 0xe2: /* loop */
5592 case 0xe3: /* jecxz */
5593 {
5594 int l1, l2;
5595
5596 tval = (int8_t)insn_get(s, OT_BYTE);
5597 next_eip = s->pc - s->cs_base;
5598 tval += next_eip;
5599 if (s->dflag == 0)
5600 tval &= 0xffff;
5601
5602 l1 = gen_new_label();
5603 l2 = gen_new_label();
5604 b &= 3;
5605 if (b == 3) {
5606 gen_op_jz_ecx[s->aflag](l1);
5607 } else {
5608 gen_op_dec_ECX[s->aflag]();
5609 if (b <= 1)
5610 gen_op_mov_T0_cc();
5611 gen_op_loop[s->aflag][b](l1);
5612 }
5613
5614 gen_jmp_im(next_eip);
5615 gen_op_jmp_label(l2);
5616 gen_set_label(l1);
5617 gen_jmp_im(tval);
5618 gen_set_label(l2);
5619 gen_eob(s);
5620 }
5621 break;
5622 case 0x130: /* wrmsr */
5623 case 0x132: /* rdmsr */
5624 if (s->cpl != 0) {
5625 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5626 } else {
5627 if (b & 2)
5628 gen_op_rdmsr();
5629 else
5630 gen_op_wrmsr();
5631 }
5632 break;
5633 case 0x131: /* rdtsc */
5634 gen_jmp_im(pc_start - s->cs_base);
5635 gen_op_rdtsc();
5636 break;
5637 case 0x134: /* sysenter */
5638 if (CODE64(s))
5639 goto illegal_op;
5640 if (!s->pe) {
5641 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5642 } else {
5643 if (s->cc_op != CC_OP_DYNAMIC) {
5644 gen_op_set_cc_op(s->cc_op);
5645 s->cc_op = CC_OP_DYNAMIC;
5646 }
5647 gen_jmp_im(pc_start - s->cs_base);
5648 gen_op_sysenter();
5649 gen_eob(s);
5650 }
5651 break;
5652 case 0x135: /* sysexit */
5653 if (CODE64(s))
5654 goto illegal_op;
5655 if (!s->pe) {
5656 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5657 } else {
5658 if (s->cc_op != CC_OP_DYNAMIC) {
5659 gen_op_set_cc_op(s->cc_op);
5660 s->cc_op = CC_OP_DYNAMIC;
5661 }
5662 gen_jmp_im(pc_start - s->cs_base);
5663 gen_op_sysexit();
5664 gen_eob(s);
5665 }
5666 break;
5667#ifdef TARGET_X86_64
5668 case 0x105: /* syscall */
5669 /* XXX: is it usable in real mode ? */
5670 if (s->cc_op != CC_OP_DYNAMIC) {
5671 gen_op_set_cc_op(s->cc_op);
5672 s->cc_op = CC_OP_DYNAMIC;
5673 }
5674 gen_jmp_im(pc_start - s->cs_base);
5675 gen_op_syscall(s->pc - pc_start);
5676 gen_eob(s);
5677 break;
5678 case 0x107: /* sysret */
5679 if (!s->pe) {
5680 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5681 } else {
5682 if (s->cc_op != CC_OP_DYNAMIC) {
5683 gen_op_set_cc_op(s->cc_op);
5684 s->cc_op = CC_OP_DYNAMIC;
5685 }
5686 gen_jmp_im(pc_start - s->cs_base);
5687 gen_op_sysret(s->dflag);
5688 /* condition codes are modified only in long mode */
5689 if (s->lma)
5690 s->cc_op = CC_OP_EFLAGS;
5691 gen_eob(s);
5692 }
5693 break;
5694#endif
5695 case 0x1a2: /* cpuid */
5696 gen_op_cpuid();
5697 break;
5698 case 0xf4: /* hlt */
5699 if (s->cpl != 0) {
5700 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5701 } else {
5702 if (s->cc_op != CC_OP_DYNAMIC)
5703 gen_op_set_cc_op(s->cc_op);
5704 gen_jmp_im(s->pc - s->cs_base);
5705 gen_op_hlt();
5706 s->is_jmp = 3;
5707 }
5708 break;
5709 case 0x100:
5710 modrm = ldub_code(s->pc++);
5711 mod = (modrm >> 6) & 3;
5712 op = (modrm >> 3) & 7;
5713 switch(op) {
5714 case 0: /* sldt */
5715 if (!s->pe || s->vm86)
5716 goto illegal_op;
5717 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5718 ot = OT_WORD;
5719 if (mod == 3)
5720 ot += s->dflag;
5721 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5722 break;
5723 case 2: /* lldt */
5724 if (!s->pe || s->vm86)
5725 goto illegal_op;
5726 if (s->cpl != 0) {
5727 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5728 } else {
5729 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5730 gen_jmp_im(pc_start - s->cs_base);
5731 gen_op_lldt_T0();
5732 }
5733 break;
5734 case 1: /* str */
5735 if (!s->pe || s->vm86)
5736 goto illegal_op;
5737 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5738 ot = OT_WORD;
5739 if (mod == 3)
5740 ot += s->dflag;
5741 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5742 break;
5743 case 3: /* ltr */
5744 if (!s->pe || s->vm86)
5745 goto illegal_op;
5746 if (s->cpl != 0) {
5747 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5748 } else {
5749 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5750 gen_jmp_im(pc_start - s->cs_base);
5751 gen_op_ltr_T0();
5752 }
5753 break;
5754 case 4: /* verr */
5755 case 5: /* verw */
5756 if (!s->pe || s->vm86)
5757 goto illegal_op;
5758 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5759 if (s->cc_op != CC_OP_DYNAMIC)
5760 gen_op_set_cc_op(s->cc_op);
5761 if (op == 4)
5762 gen_op_verr();
5763 else
5764 gen_op_verw();
5765 s->cc_op = CC_OP_EFLAGS;
5766 break;
5767 default:
5768 goto illegal_op;
5769 }
5770 break;
5771 case 0x101:
5772 modrm = ldub_code(s->pc++);
5773 mod = (modrm >> 6) & 3;
5774 op = (modrm >> 3) & 7;
5775 rm = modrm & 7;
5776 switch(op) {
5777 case 0: /* sgdt */
5778 if (mod == 3)
5779 goto illegal_op;
5780 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5781 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5782 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5783 gen_add_A0_im(s, 2);
5784 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5785 if (!s->dflag)
5786 gen_op_andl_T0_im(0xffffff);
5787 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5788 break;
5789 case 1:
5790 if (mod == 3) {
5791 switch (rm) {
5792 case 0: /* monitor */
5793 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5794 s->cpl != 0)
5795 goto illegal_op;
5796 gen_jmp_im(pc_start - s->cs_base);
5797#ifdef TARGET_X86_64
5798 if (s->aflag == 2) {
5799 gen_op_movq_A0_reg[R_EBX]();
5800 gen_op_addq_A0_AL();
5801 } else
5802#endif
5803 {
5804 gen_op_movl_A0_reg[R_EBX]();
5805 gen_op_addl_A0_AL();
5806 if (s->aflag == 0)
5807 gen_op_andl_A0_ffff();
5808 }
5809 gen_add_A0_ds_seg(s);
5810 gen_op_monitor();
5811 break;
5812 case 1: /* mwait */
5813 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5814 s->cpl != 0)
5815 goto illegal_op;
5816 if (s->cc_op != CC_OP_DYNAMIC) {
5817 gen_op_set_cc_op(s->cc_op);
5818 s->cc_op = CC_OP_DYNAMIC;
5819 }
5820 gen_jmp_im(s->pc - s->cs_base);
5821 gen_op_mwait();
5822 gen_eob(s);
5823 break;
5824 default:
5825 goto illegal_op;
5826 }
5827 } else { /* sidt */
5828 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5829 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5830 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5831 gen_add_A0_im(s, 2);
5832 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5833 if (!s->dflag)
5834 gen_op_andl_T0_im(0xffffff);
5835 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5836 }
5837 break;
5838 case 2: /* lgdt */
5839 case 3: /* lidt */
5840 if (mod == 3)
5841 goto illegal_op;
5842 if (s->cpl != 0) {
5843 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5844 } else {
5845 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5846 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5847 gen_add_A0_im(s, 2);
5848 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5849 if (!s->dflag)
5850 gen_op_andl_T0_im(0xffffff);
5851 if (op == 2) {
5852 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5853 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5854 } else {
5855 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5856 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5857 }
5858 }
5859 break;
5860 case 4: /* smsw */
5861 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5862 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5863 break;
5864 case 6: /* lmsw */
5865 if (s->cpl != 0) {
5866 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5867 } else {
5868 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5869 gen_op_lmsw_T0();
5870 gen_jmp_im(s->pc - s->cs_base);
5871 gen_eob(s);
5872 }
5873 break;
5874 case 7: /* invlpg */
5875 if (s->cpl != 0) {
5876 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5877 } else {
5878 if (mod == 3) {
5879#ifdef TARGET_X86_64
5880 if (CODE64(s) && rm == 0) {
5881 /* swapgs */
5882 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5883 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5884 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5885 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5886 } else
5887#endif
5888 {
5889 goto illegal_op;
5890 }
5891 } else {
5892 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5893 gen_op_invlpg_A0();
5894 gen_jmp_im(s->pc - s->cs_base);
5895 gen_eob(s);
5896 }
5897 }
5898 break;
5899 default:
5900 goto illegal_op;
5901 }
5902 break;
5903 case 0x108: /* invd */
5904 case 0x109: /* wbinvd */
5905 if (s->cpl != 0) {
5906 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5907 } else {
5908 /* nothing to do */
5909 }
5910 break;
5911 case 0x63: /* arpl or movslS (x86_64) */
5912#ifdef TARGET_X86_64
5913 if (CODE64(s)) {
5914 int d_ot;
5915 /* d_ot is the size of destination */
5916 d_ot = dflag + OT_WORD;
5917
5918 modrm = ldub_code(s->pc++);
5919 reg = ((modrm >> 3) & 7) | rex_r;
5920 mod = (modrm >> 6) & 3;
5921 rm = (modrm & 7) | REX_B(s);
5922
5923 if (mod == 3) {
5924 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5925 /* sign extend */
5926 if (d_ot == OT_QUAD)
5927 gen_op_movslq_T0_T0();
5928 gen_op_mov_reg_T0[d_ot][reg]();
5929 } else {
5930 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5931 if (d_ot == OT_QUAD) {
5932 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5933 } else {
5934 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5935 }
5936 gen_op_mov_reg_T0[d_ot][reg]();
5937 }
5938 } else
5939#endif
5940 {
5941 if (!s->pe || s->vm86)
5942 goto illegal_op;
5943 ot = dflag ? OT_LONG : OT_WORD;
5944 modrm = ldub_code(s->pc++);
5945 reg = (modrm >> 3) & 7;
5946 mod = (modrm >> 6) & 3;
5947 rm = modrm & 7;
5948 if (mod != 3) {
5949 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5950 gen_op_ld_T0_A0[ot + s->mem_index]();
5951 } else {
5952 gen_op_mov_TN_reg[ot][0][rm]();
5953 }
5954 if (s->cc_op != CC_OP_DYNAMIC)
5955 gen_op_set_cc_op(s->cc_op);
5956 gen_op_arpl();
5957 s->cc_op = CC_OP_EFLAGS;
5958 if (mod != 3) {
5959 gen_op_st_T0_A0[ot + s->mem_index]();
5960 } else {
5961 gen_op_mov_reg_T0[ot][rm]();
5962 }
5963 gen_op_arpl_update();
5964 }
5965 break;
5966 case 0x102: /* lar */
5967 case 0x103: /* lsl */
5968 if (!s->pe || s->vm86)
5969 goto illegal_op;
5970 ot = dflag ? OT_LONG : OT_WORD;
5971 modrm = ldub_code(s->pc++);
5972 reg = ((modrm >> 3) & 7) | rex_r;
5973 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5974 gen_op_mov_TN_reg[ot][1][reg]();
5975 if (s->cc_op != CC_OP_DYNAMIC)
5976 gen_op_set_cc_op(s->cc_op);
5977 if (b == 0x102)
5978 gen_op_lar();
5979 else
5980 gen_op_lsl();
5981 s->cc_op = CC_OP_EFLAGS;
5982 gen_op_mov_reg_T1[ot][reg]();
5983 break;
5984 case 0x118:
5985 modrm = ldub_code(s->pc++);
5986 mod = (modrm >> 6) & 3;
5987 op = (modrm >> 3) & 7;
5988 switch(op) {
5989 case 0: /* prefetchnta */
5990 case 1: /* prefetcht0 */
5991 case 2: /* prefetcht1 */
5992 case 3: /* prefetcht2 */
        /* prefetch requires a memory operand; register form is #UD */
5993 if (mod == 3)
5994 goto illegal_op;
        /* decode the addressing mode (also advances s->pc past the
           modrm bytes); the prefetch hint itself is a no-op here */
5995 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5996 /* nothing more to do */
5997 break;
5998 default: /* nop (multi byte) */
5999 gen_nop_modrm(s, modrm);
6000 break;
6001 }
6002 break;
6003 case 0x119 ... 0x11f: /* nop (multi byte) */
6004 modrm = ldub_code(s->pc++);
6005 gen_nop_modrm(s, modrm);
6006 break;
6007 case 0x120: /* mov reg, crN */
6008 case 0x122: /* mov crN, reg */
6009 if (s->cpl != 0) {
6010 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6011 } else {
6012 modrm = ldub_code(s->pc++);
6013 if ((modrm & 0xc0) != 0xc0)
6014 goto illegal_op;
6015 rm = (modrm & 7) | REX_B(s);
6016 reg = ((modrm >> 3) & 7) | rex_r;
6017 if (CODE64(s))
6018 ot = OT_QUAD;
6019 else
6020 ot = OT_LONG;
6021 switch(reg) {
6022 case 0:
6023 case 2:
6024 case 3:
6025 case 4:
6026 case 8:
6027 if (b & 2) {
6028 gen_op_mov_TN_reg[ot][0][rm]();
6029 gen_op_movl_crN_T0(reg);
6030 gen_jmp_im(s->pc - s->cs_base);
6031 gen_eob(s);
6032 } else {
6033#if !defined(CONFIG_USER_ONLY)
6034 if (reg == 8)
6035 gen_op_movtl_T0_cr8();
6036 else
6037#endif
6038 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6039 gen_op_mov_reg_T0[ot][rm]();
6040 }
6041 break;
6042 default:
6043 goto illegal_op;
6044 }
6045 }
6046 break;
6047 case 0x121: /* mov reg, drN */
6048 case 0x123: /* mov drN, reg */
6049 if (s->cpl != 0) {
6050 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6051 } else {
6052 modrm = ldub_code(s->pc++);
6053 if ((modrm & 0xc0) != 0xc0)
6054 goto illegal_op;
6055 rm = (modrm & 7) | REX_B(s);
6056 reg = ((modrm >> 3) & 7) | rex_r;
6057 if (CODE64(s))
6058 ot = OT_QUAD;
6059 else
6060 ot = OT_LONG;
6061 /* XXX: do it dynamically with CR4.DE bit */
6062 if (reg == 4 || reg == 5 || reg >= 8)
6063 goto illegal_op;
6064 if (b & 2) {
6065 gen_op_mov_TN_reg[ot][0][rm]();
6066 gen_op_movl_drN_T0(reg);
6067 gen_jmp_im(s->pc - s->cs_base);
6068 gen_eob(s);
6069 } else {
6070 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6071 gen_op_mov_reg_T0[ot][rm]();
6072 }
6073 }
6074 break;
6075 case 0x106: /* clts */
6076 if (s->cpl != 0) {
6077 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6078 } else {
6079 gen_op_clts();
6080 /* abort block because static cpu state changed */
6081 gen_jmp_im(s->pc - s->cs_base);
6082 gen_eob(s);
6083 }
6084 break;
6085 /* MMX/SSE/SSE2/PNI support */
6086 case 0x1c3: /* MOVNTI reg, mem */
6087 if (!(s->cpuid_features & CPUID_SSE2))
6088 goto illegal_op;
6089 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6090 modrm = ldub_code(s->pc++);
6091 mod = (modrm >> 6) & 3;
6092 if (mod == 3)
6093 goto illegal_op;
6094 reg = ((modrm >> 3) & 7) | rex_r;
6095 /* generate a generic store */
6096 gen_ldst_modrm(s, modrm, ot, reg, 1);
6097 break;
6098 case 0x1ae:
6099 modrm = ldub_code(s->pc++);
6100 mod = (modrm >> 6) & 3;
6101 op = (modrm >> 3) & 7;
6102 switch(op) {
6103 case 0: /* fxsave */
6104 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6105 (s->flags & HF_EM_MASK))
6106 goto illegal_op;
6107 if (s->flags & HF_TS_MASK) {
6108 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6109 break;
6110 }
6111 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6112 gen_op_fxsave_A0((s->dflag == 2));
6113 break;
6114 case 1: /* fxrstor */
6115 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6116 (s->flags & HF_EM_MASK))
6117 goto illegal_op;
6118 if (s->flags & HF_TS_MASK) {
6119 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6120 break;
6121 }
6122 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6123 gen_op_fxrstor_A0((s->dflag == 2));
6124 break;
6125 case 2: /* ldmxcsr */
6126 case 3: /* stmxcsr */
6127 if (s->flags & HF_TS_MASK) {
6128 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6129 break;
6130 }
6131 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6132 mod == 3)
6133 goto illegal_op;
6134 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6135 if (op == 2) {
6136 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6137 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6138 } else {
6139 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6140 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6141 }
6142 break;
6143 case 5: /* lfence */
6144 case 6: /* mfence */
6145 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6146 goto illegal_op;
6147 break;
6148 case 7: /* sfence / clflush */
6149 if ((modrm & 0xc7) == 0xc0) {
6150 /* sfence */
6151 if (!(s->cpuid_features & CPUID_SSE))
6152 goto illegal_op;
6153 } else {
6154 /* clflush */
6155 if (!(s->cpuid_features & CPUID_CLFLUSH))
6156 goto illegal_op;
6157 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6158 }
6159 break;
6160 default:
6161 goto illegal_op;
6162 }
6163 break;
6164 case 0x10d: /* prefetch */
6165 modrm = ldub_code(s->pc++);
6166 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6167 /* ignore for now */
6168 break;
6169 case 0x1aa: /* rsm */
6170 if (!(s->flags & HF_SMM_MASK))
6171 goto illegal_op;
6172 if (s->cc_op != CC_OP_DYNAMIC) {
6173 gen_op_set_cc_op(s->cc_op);
6174 s->cc_op = CC_OP_DYNAMIC;
6175 }
6176 gen_jmp_im(s->pc - s->cs_base);
6177 gen_op_rsm();
6178 gen_eob(s);
6179 break;
6180 case 0x110 ... 0x117:
6181 case 0x128 ... 0x12f:
6182 case 0x150 ... 0x177:
6183 case 0x17c ... 0x17f:
6184 case 0x1c2:
6185 case 0x1c4 ... 0x1c6:
6186 case 0x1d0 ... 0x1fe:
6187 gen_sse(s, b, pc_start, rex_r);
6188 break;
6189 default:
6190 goto illegal_op;
6191 }
6192 /* lock generation */
6193 if (s->prefix & PREFIX_LOCK)
6194 gen_op_unlock();
6195 return s->pc;
6196 illegal_op:
6197 if (s->prefix & PREFIX_LOCK)
6198 gen_op_unlock();
6199 /* XXX: ensure that no lock was generated */
6200 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6201 return s->pc;
6202}
6203
6204#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6205#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6206
/* flags read by an operation.  Indexed by micro-op number (INDEX_op_*);
   each entry is a mask of condition-code bits (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C)
   that the op consumes.  optimize_flags() uses this table to propagate flag
   liveness backwards through a translated block; ops missing from the table
   default to 0 (read no flags). */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps on the result of a subtract, per operand size */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc from the dynamically-computed flags */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc from a subtract result, per operand size */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rotate-through-carry ops read the carry flag in; one
   instantiation per operand-access suffix (see invocations below) */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\

    /* instantiate the entries for each operand-access variant; the
       _kernel/_user variants only exist with a software MMU */
    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6339
/* flags written by an operation.  Indexed by micro-op number; each entry is
   the mask of CC_* bits the op may overwrite.  In optimize_flags(), an op
   none of whose written flags is live afterwards is replaced by its
   opc_simpler[] equivalent.  Missing entries default to 0 (writes none). */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* direct EFLAGS loads; the byte variant leaves O untouched */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* arithmetic/shift/rotate ops that exist in one variant per
   operand-access suffix (see invocations below) */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    /* instantiate the entries for each operand-access variant; the
       _kernel/_user variants only exist with a software MMU */
    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6490
/* simpler form of an operation if no flags need to be generated.
   Maps a flag-producing micro-op to an equivalent op that skips the flag
   computation.  Entries left at 0 here are fixed up to the identity mapping
   by optimize_flags_init(), so optimize_flags() can substitute
   opc_simpler[op] unconditionally. */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops become no-ops when their flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: drop the _cc (flag-computing) suffix variant */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotates exist in one variant per operand-access suffix */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6534
6535void optimize_flags_init(void)
6536{
6537 int i;
6538 /* put default values in arrays */
6539 for(i = 0; i < NB_OPS; i++) {
6540 if (opc_simpler[i] == 0)
6541 opc_simpler[i] = i;
6542 }
6543}
6544
6545/* CPU flags computation optimization: we move backward thru the
6546 generated code to see which flags are needed. The operation is
6547 modified if suitable */
6548static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6549{
6550 uint16_t *opc_ptr;
6551 int live_flags, write_flags, op;
6552
6553 opc_ptr = opc_buf + opc_buf_len;
6554 /* live_flags contains the flags needed by the next instructions
6555 in the code. At the end of the bloc, we consider that all the
6556 flags are live. */
6557 live_flags = CC_OSZAPC;
6558 while (opc_ptr > opc_buf) {
6559 op = *--opc_ptr;
6560 /* if none of the flags written by the instruction is used,
6561 then we can try to find a simpler instruction */
6562 write_flags = opc_write_flags[op];
6563 if ((live_flags & write_flags) == 0) {
6564 *opc_ptr = opc_simpler[op];
6565 }
6566 /* compute the live flags before the instruction */
6567 live_flags &= ~write_flags;
6568 live_flags |= opc_read_flags[op];
6569 }
6570}
6571
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'.  If search_pc is TRUE, also record per-micro-op
   guest PC / cc_op information (gen_opc_pc, gen_opc_cc_op,
   gen_opc_instr_start) so a host PC can later be mapped back to a
   guest instruction.  Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the CPU mode bits captured in tb->flags into the
       disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX
    /* VBox extension: track CR4.VME (virtual-8086 mode extensions) */
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: mem_index picks the op suffix
       group (0 = raw, 1*4 = kernel, 2*4 = user) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct TB chaining is only allowed when no single-step/IRQ-inhibit
       condition forces us to stop after every instruction */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the global micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;    /* index of the last micro-op annotated for search_pc */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* pad annotation slots for micro-ops emitted since the last
               instruction start, then mark this instruction's first op */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
    if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
    {
        //should never happen as the jump to the patch code terminates the translation block
        dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
    }
*/
#endif
        /* VBox extension: one-shot single-instruction emulation request;
           consume the flag and end the block after this instruction */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6756
6757int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6758{
6759 return gen_intermediate_code_internal(env, tb, 0);
6760}
6761
6762int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6763{
6764 return gen_intermediate_code_internal(env, tb, 1);
6765}
6766
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette