VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 8722

Last change on this file since 8722 was 6726, checked in by vboxsync, 17 years ago

2 fixes from qemu: fix cmpxchg8b detection and fix DR6 single step exception status bit

  • Property svn:eol-style set to native
File size: 200.9 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#ifndef VBOX
26#include <signal.h>
27#include <assert.h>
28#endif /* !VBOX */
29
30#include "cpu.h"
31#include "exec-all.h"
32#include "disas.h"
33
/* XXX: move that elsewhere */
/* Output streams of the code generator: the micro-op index stream and
   its parameter stream, filled while translating a block. */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;

/* Instruction-prefix bits accumulated while decoding (DisasContext.prefix). */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
/* On 64-bit targets these expand to real values/accessors... */
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
/* ...and on 32-bit targets they collapse to NULL table slots / constants
   so the shared dispatch tables keep their shape. */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
/* Non-zero when byte-register accesses use the REX-style uniform byte
   registers rather than the legacy AH/CH/DH/BH -- see DEF_BREGS below. */
static int x86_64_hregs;
#endif

#ifdef USE_DIRECT_JUMP
#define TBPARAM(x)
#else
#define TBPARAM(x) (long)(x)
#endif
71
72#ifdef VBOX
73/* Special/override code readers to hide patched code. */
74
75uint8_t ldub_code_raw(target_ulong pc)
76{
77 uint8_t b;
78
79 if (!remR3GetOpcode(cpu_single_env, pc, &b))
80 b = ldub_code(pc);
81 return b;
82}
83#define ldub_code(a) ldub_code_raw(a)
84
85uint16_t lduw_code_raw(target_ulong pc)
86{
87 return (ldub_code(pc+1) << 8) | ldub_code(pc);
88}
89#define lduw_code(a) lduw_code_raw(a)
90
91
92uint32_t ldl_code_raw(target_ulong pc)
93{
94 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
95}
96#define ldl_code(a) ldl_code_raw(a)
97
98#endif /* VBOX */
99
100
/* Per-translation state of the i386 front end.  One DisasContext lives
 * for the duration of translating a single translation block; the
 * "current insn" fields are refreshed for each decoded instruction. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index; -1 if no override */
    int prefix;   /* accumulated prefix bits (presumably PREFIX_* -- see defines above) */
    int aflag, dflag; /* address/operand size: 0=16 bit, 1=32 bit, 2=64 bit (x86_64 only) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B extension bits for modrm decoding */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (lazy flags state) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;         /* CR4.VME */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* EFLAGS I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (offset into the 3*4 ld/st tables) */
    int flags; /* all execution flags */
    struct TranslationBlock *tb; /* TB being translated */
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* guest CPUID feature bits */
    int cpuid_ext_features; /* guest CPUID extended feature bits */
} DisasContext;
140
141static void gen_eob(DisasContext *s);
142static void gen_jmp(DisasContext *s, target_ulong eip);
143static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
144
/* i386 arith/logic operations.
 * NOTE(review): the ordering appears to follow the x86 opcode-extension
 * encoding; cc_op_arithb[] and gen_op() index directly by these values,
 * so do not reorder. */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented; DEF_SHIFT maps slot 6 to the SHL ops again */
    OP_SAR = 7,
};
168
/* Micro-op index space: one INDEX_op_* constant per DEF() in opc.h. */
enum {
#define DEF(s, n, copy_size) INDEX_op_ ## s,
#include "opc.h"
#undef DEF
    NB_OPS,
};

#include "gen-op.h"

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

/* Virtual operand identifiers used by the decoder. */
enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
201
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

/* Expand one table-initializer entry per integer register (16 regs). */
#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,\
    prefix ## R8 ## suffix,\
    prefix ## R9 ## suffix,\
    prefix ## R10 ## suffix,\
    prefix ## R11 ## suffix,\
    prefix ## R12 ## suffix,\
    prefix ## R13 ## suffix,\
    prefix ## R14 ## suffix,\
    prefix ## R15 ## suffix,

/* Byte access to registers 4..7 is ambiguous on x86_64: with a REX
 * prefix they are the uniform low-byte regs (SPL/BPL/SIL/DIL), without
 * one they are the legacy high-byte regs of EAX/ECX/EDX/EBX (AH/CH/DH/BH).
 * These wrappers choose between the two op families at translation time
 * via the x86_64_hregs flag; note how the "high" fallback maps
 * ESP->EAX, EBP->ECX, ESI->EDX, EDI->EBX. */
#define DEF_BREGS(prefixb, prefixh, suffix) \
 \
static void prefixb ## ESP ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## ESP ## suffix (); \
    else \
        prefixh ## EAX ## suffix (); \
} \
 \
static void prefixb ## EBP ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## EBP ## suffix (); \
    else \
        prefixh ## ECX ## suffix (); \
} \
 \
static void prefixb ## ESI ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## ESI ## suffix (); \
    else \
        prefixh ## EDX ## suffix (); \
} \
 \
static void prefixb ## EDI ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## EDI ## suffix (); \
    else \
        prefixh ## EBX ## suffix (); \
}

DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

/* 32-bit target: only the 8 classic registers exist. */
#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
278
/* Store T0 into a general register: [operand size][register].
 * The OT_BYTE row is written out by hand because byte slots 4..7 need
 * the AH/CH/DH/BH vs SPL/BPL/SIL/DIL distinction (the *_wrapper entries
 * on x86_64, the movh_* ops on 32-bit targets). */
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T0,
        gen_op_movb_ECX_T0,
        gen_op_movb_EDX_T0,
        gen_op_movb_EBX_T0,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T0_wrapper,
        gen_op_movb_EBP_T0_wrapper,
        gen_op_movb_ESI_T0_wrapper,
        gen_op_movb_EDI_T0_wrapper,
        gen_op_movb_R8_T0,
        gen_op_movb_R9_T0,
        gen_op_movb_R10_T0,
        gen_op_movb_R11_T0,
        gen_op_movb_R12_T0,
        gen_op_movb_R13_T0,
        gen_op_movb_R14_T0,
        gen_op_movb_R15_T0,
#else
        gen_op_movh_EAX_T0,
        gen_op_movh_ECX_T0,
        gen_op_movh_EDX_T0,
        gen_op_movh_EBX_T0,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T0)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T0)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T0)
    },
#endif
};

/* Same layout as gen_op_mov_reg_T0, but the source is T1. */
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T1,
        gen_op_movb_ECX_T1,
        gen_op_movb_EDX_T1,
        gen_op_movb_EBX_T1,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T1_wrapper,
        gen_op_movb_EBP_T1_wrapper,
        gen_op_movb_ESI_T1_wrapper,
        gen_op_movb_EDI_T1_wrapper,
        gen_op_movb_R8_T1,
        gen_op_movb_R9_T1,
        gen_op_movb_R10_T1,
        gen_op_movb_R11_T1,
        gen_op_movb_R12_T1,
        gen_op_movb_R13_T1,
        gen_op_movb_R14_T1,
        gen_op_movb_R15_T1,
#else
        gen_op_movh_EAX_T1,
        gen_op_movh_ECX_T1,
        gen_op_movh_EDX_T1,
        gen_op_movh_EBX_T1,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T1)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T1)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T1)
    },
#endif
};
356
/* Store A0 into a register; index 0/1/2 = 16/32/64 bit (no byte form,
 * hence the NB_OP_SIZES - 1 first dimension). */
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_movw_, _A0)
    },
    [1] = {
        DEF_REGS(gen_op_movl_, _A0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_movq_, _A0)
    },
#endif
};

/* Load a register into T0 or T1: [operand size][0 = T0, 1 = T1][reg].
 * Only the OT_BYTE row is size-specific (AH..BH vs SPL..DIL handling);
 * the WORD/LONG/QUAD rows all reuse the gen_op_movl_* loaders.
 * NOTE(review): presumably those ops copy the whole native register and
 * the consumer masks to size -- confirm in gen-op.h. */
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
{
    [OT_BYTE] = {
        {
            gen_op_movl_T0_EAX,
            gen_op_movl_T0_ECX,
            gen_op_movl_T0_EDX,
            gen_op_movl_T0_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T0_ESP_wrapper,
            gen_op_movl_T0_EBP_wrapper,
            gen_op_movl_T0_ESI_wrapper,
            gen_op_movl_T0_EDI_wrapper,
            gen_op_movl_T0_R8,
            gen_op_movl_T0_R9,
            gen_op_movl_T0_R10,
            gen_op_movl_T0_R11,
            gen_op_movl_T0_R12,
            gen_op_movl_T0_R13,
            gen_op_movl_T0_R14,
            gen_op_movl_T0_R15,
#else
            gen_op_movh_T0_EAX,
            gen_op_movh_T0_ECX,
            gen_op_movh_T0_EDX,
            gen_op_movh_T0_EBX,
#endif
        },
        {
            gen_op_movl_T1_EAX,
            gen_op_movl_T1_ECX,
            gen_op_movl_T1_EDX,
            gen_op_movl_T1_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T1_ESP_wrapper,
            gen_op_movl_T1_EBP_wrapper,
            gen_op_movl_T1_ESI_wrapper,
            gen_op_movl_T1_EDI_wrapper,
            gen_op_movl_T1_R8,
            gen_op_movl_T1_R9,
            gen_op_movl_T1_R10,
            gen_op_movl_T1_R11,
            gen_op_movl_T1_R12,
            gen_op_movl_T1_R13,
            gen_op_movl_T1_R14,
            gen_op_movl_T1_R15,
#else
            gen_op_movh_T1_EAX,
            gen_op_movh_T1_ECX,
            gen_op_movh_T1_EDX,
            gen_op_movh_T1_EBX,
#endif
        },
    },
    [OT_WORD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
    [OT_LONG] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#endif
};
452
/* A0 = reg (32-bit address computation). */
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movl_A0_, )
};

/* A0 += reg << scale for SIB addressing; first index is the scale 0..3. */
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addl_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addl_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addl_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addl_A0_, _s3)
    },
};

#ifdef TARGET_X86_64
/* 64-bit variants of the two address-computation tables above. */
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movq_A0_, )
};

static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addq_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addq_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addq_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addq_A0_, _s3)
    },
};
#endif

/* CMOVcc into a register from T1/T0; index 0/1/2 = 16/32/64 bit. */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
506
/* Plain logical ops indexed by OP_*; ADD/ADC/SBB/SUB/CMP are handled
 * by dedicated paths in gen_op(), hence the NULL slots. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};

/* Carry-consuming ALU ops for one access-mode SUFFIX: one row per
 * operand size, columns { ADC, SBB }. */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

/* Memory-destination ADC/SBB: a 4-row band per mem_index
 * (_raw always; _kernel and _user only with a softmmu build). */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

/* CC_OP_* (byte-size base) produced by each OP_* ALU operation:
 * ADD/ADC share the ADD flags class, SBB/SUB/CMP the SUB class,
 * OR/AND/XOR the LOGIC class. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};

/* CMPXCHG ops for one access-mode SUFFIX, one entry per operand size. */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
576
/* Rotate/shift ops for one access-mode SUFFIX: one row per operand
 * size, columns in OP_ROL..OP_SAR order.  Slot 6 (the undocumented
 * OP_SHL1 encoding) reuses the SHL op. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};

/* Double-precision shifts (SHLD/SHRD); no byte form, hence the NULL
 * first row.  `op` selects the count source (immediate or ECX). */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
672
/* Bit-test ops BT/BTS/BTR/BTC: [0]=16 bit, [1]=32 bit, [2]=64 bit. */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* Adjust A0 by the bit offset in T1 for memory-operand bit tests. */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* Bit-scan ops: columns { BSF, BSR }. */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};

/* All ld/st dispatch tables below are flat 3*4 arrays indexed by
 * (operand size + mem_index): a 4-entry band for each access mode,
 * _raw first, then _kernel and _user in softmmu builds. */

/* Sign-extending loads (32->64 only exists on x86_64). */
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
    gen_op_ldsb_raw_T0_A0,
    gen_op_ldsw_raw_T0_A0,
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
    NULL,
#ifndef CONFIG_USER_ONLY
    gen_op_ldsb_kernel_T0_A0,
    gen_op_ldsw_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
    NULL,

    gen_op_ldsb_user_T0_A0,
    gen_op_ldsw_user_T0_A0,
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
    NULL,
#endif
};

/* Zero-extending byte/word loads (no long/quad forms needed). */
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    NULL,
    NULL,

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    NULL,
    NULL,

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    NULL,
    NULL,
#endif
};

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    gen_op_ldl_raw_T0_A0,
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    gen_op_ldl_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    gen_op_ldl_user_T0_A0,
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
#endif
};

static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
    gen_op_ldub_raw_T1_A0,
    gen_op_lduw_raw_T1_A0,
    gen_op_ldl_raw_T1_A0,
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T1_A0,
    gen_op_lduw_kernel_T1_A0,
    gen_op_ldl_kernel_T1_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),

    gen_op_ldub_user_T1_A0,
    gen_op_lduw_user_T1_A0,
    gen_op_ldl_user_T1_A0,
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
#endif
};

static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
    gen_op_stb_raw_T0_A0,
    gen_op_stw_raw_T0_A0,
    gen_op_stl_raw_T0_A0,
    X86_64_ONLY(gen_op_stq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_stb_kernel_T0_A0,
    gen_op_stw_kernel_T0_A0,
    gen_op_stl_kernel_T0_A0,
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),

    gen_op_stb_user_T0_A0,
    gen_op_stw_user_T0_A0,
    gen_op_stl_user_T0_A0,
    X86_64_ONLY(gen_op_stq_user_T0_A0),
#endif
};

/* T1 stores have no byte form (first slot of each band is NULL). */
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
    NULL,
    gen_op_stw_raw_T1_A0,
    gen_op_stl_raw_T1_A0,
    X86_64_ONLY(gen_op_stq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    NULL,
    gen_op_stw_kernel_T1_A0,
    gen_op_stl_kernel_T1_A0,
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),

    NULL,
    gen_op_stw_user_T1_A0,
    gen_op_stl_user_T1_A0,
    X86_64_ONLY(gen_op_stq_user_T1_A0),
#endif
};
832
833#ifdef VBOX
/* VBOX: emit the check-external-event micro-op (thin wrapper around
 * gen_op_check_external_event(), called from gen_jmp_im()).
 * Fix: declare the empty parameter list as (void) -- a bare () in C
 * declares an unprototyped function and disables argument checking. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
838
/* VBOX: emit a store of `pc` into the saved EIP/RIP, picking the
 * narrowest immediate form that can represent it.  Unlike gen_jmp_im()
 * this does NOT emit the external-event check. */
static inline void gen_update_eip(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* fits as a sign-extended 32-bit immediate */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit value, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
853
854#endif /* VBOX */
855
/* Emit a store of `pc` into the saved EIP/RIP (same immediate-width
 * selection as gen_update_eip()); on VBOX builds it additionally emits
 * a pending-external-event check first. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* fits as a sign-extended 32-bit immediate */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit value, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
873
/* Emit A0 = source address of a string op: (seg:)E/RSI, honouring any
 * segment override, the address size in s->aflag, and 16-bit wrap. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment base only applied for explicit overrides */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;  /* non-zero seg bases present: must add DS base */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();  /* 16-bit address wrap before adding the base */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
908
/* Emit A0 = destination address of a string op: always ES:E/RDI
 * (string destinations cannot take a segment override). */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            /* some segment base is non-zero: add the ES base explicitly */
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16-bit address: wrap to 64K, then add the ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
929
/* T0 <- per-element increment for string ops.
 * NOTE(review): sign presumably follows EFLAGS.DF (op name "Dshift");
 * confirm in the ops definitions. */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* Conditional jumps / decrement on CX/ECX/RCX, indexed by aflag. */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* REPZ/REPNZ termination tests after CMPS/SCAS: row [0] jumps while
 * the last SUB result is non-zero, row [1] while it is zero. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

/* Port I/O micro-ops, indexed by operand size (byte/word/long). */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* I/O-permission checks, port taken from T0 or from DX. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
1005
1006static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1007{
1008 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1009 if (s->cc_op != CC_OP_DYNAMIC)
1010 gen_op_set_cc_op(s->cc_op);
1011 gen_jmp_im(cur_eip);
1012 if (use_dx)
1013 gen_check_io_DX[ot]();
1014 else
1015 gen_check_io_T0[ot]();
1016 }
1017}
1018
/* Emit one MOVS iteration: copy an element of size `ot` from
 * (seg:)ESI to ES:EDI, then advance both index registers by the
 * Dshift delta, using the address size from s->aflag. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();  /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1040
1041static inline void gen_update_cc_op(DisasContext *s)
1042{
1043 if (s->cc_op != CC_OP_DYNAMIC) {
1044 gen_op_set_cc_op(s->cc_op);
1045 s->cc_op = CC_OP_DYNAMIC;
1046 }
1047}
1048
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the ECX == 0 test for a REP-prefixed string op: when ECX is
 * non-zero, control skips (via l1) over a jump to the next instruction
 * and falls into the string-op body.  Returns label l2, which sits just
 * before that exit jump so callers can branch to it to terminate the
 * loop early (REPZ/REPNZ condition failure). */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);  /* ECX != 0 -> run the string op */
    gen_set_label(l2);             /* early-exit target */
    gen_jmp_tb(s, next_eip, 1);    /* ECX == 0 -> continue at next insn */
    gen_set_label(l1);
    return l2;
}
1063
/* Emit one STOS iteration: store AL/AX/EAX/RAX to ES:EDI, then
 * advance EDI by the Dshift delta. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1081
/* Emit one LODS iteration: load an element from (seg:)ESI into
 * AL/AX/EAX/RAX, then advance ESI by the Dshift delta. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1099
/* Emit one SCAS iteration: compare the accumulator against the element
 * at ES:EDI (flags via the SUB compare op), then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1118
/* Emit one CMPS iteration: compare the element at (seg:)ESI with the
 * one at ES:EDI, then advance both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1141
/* Emit one INS iteration: read a value from port DX and store it to
 * ES:EDI, then advance EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Store a zero to [A0] before performing the port read.
       NOTE(review): presumably so a page fault on the destination is
       taken before the (non-restartable) I/O side effect -- confirm. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1161
/* Emit one OUTS iteration: load an element from (seg:)ESI and write it
 * to port DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1179
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ(op): REP-prefixed form of a string insn without a flag-based
 * termination condition (MOVS/STOS/LODS/INS/OUTS):
 *   - exit to next_eip when ECX == 0 (gen_jz_ecx_string)
 *   - execute one iteration, decrement ECX
 *   - jump back to cur_eip so each iteration is its own TB step */
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2;\
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}

/* GEN_REPZ2(op): as above but for SCAS/CMPS, which additionally
 * terminate on the ZF produced by the comparison (`nz` selects the
 * REPZ vs REPNZ sense via gen_op_string_jnz_sub). */
#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2;\
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1223
/* Condition-test indices used by the jcc/setcc tables below.
 * NOTE(review): ordering appears to match the x86 condition encoding
 * (without the negation bit) -- confirm before reordering. */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* Fast conditional jumps valid right after a SUB/CMP of the given
 * size.  NULL entries (JCC_O, JCC_P) have no fast form; the OT_QUAD
 * row additionally disables several entries via BUGGY_64 (== NULL),
 * see the BUGGY_64 comment near the top of the file. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
/* LOOPNZ / LOOPZ / LOOP(jnz-ecx) per address size; the fourth slot of
 * each [4] row is implicitly NULL. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1298
/* Slow SETcc: computes the condition from the full flags state. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};

/* Fast SETcc valid right after a SUB/CMP of the given size;
 * NULL = fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};

/* FPU ST0 <- ST0 op FT0, indexed by the instruction's 3-bit op field.
 * Slots 2 and 3 both emit FCOM; NOTE(review): FCOMP's stack pop is
 * presumably emitted by the caller -- confirm. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
/* FPU ST(n) <- ST(n) op ST0: SUB/SUBR and DIV/DIVR are swapped
 * relative to the table above. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1377
/* Emit code for a two-operand ALU operation 'op' (OP_ADDL..OP_CMPL)
   of size 'ot' on destination 'd' with the second operand already in
   T1. If d == OR_TMP0, it means memory operand (address in A0).
   Updates s1->cc_op so flags can be computed lazily later. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the current carry, so the flags must be
           materialized before the operation */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            /* the memory variant performs the read-modify-write and
               the flag update in one op */
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP: flags only, no result is written back */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1437
1438/* if d == OR_TMP0, it means memory operand (address in A0) */
1439static void gen_inc(DisasContext *s1, int ot, int d, int c)
1440{
1441 if (d != OR_TMP0)
1442 gen_op_mov_TN_reg[ot][0][d]();
1443 else
1444 gen_op_ld_T0_A0[ot + s1->mem_index]();
1445 if (s1->cc_op != CC_OP_DYNAMIC)
1446 gen_op_set_cc_op(s1->cc_op);
1447 if (c > 0) {
1448 gen_op_incl_T0();
1449 s1->cc_op = CC_OP_INCB + ot;
1450 } else {
1451 gen_op_decl_T0();
1452 s1->cc_op = CC_OP_DECB + ot;
1453 }
1454 if (d != OR_TMP0)
1455 gen_op_mov_reg_T0[ot][d]();
1456 else
1457 gen_op_st_T0_A0[ot + s1->mem_index]();
1458 gen_op_update_inc_cc();
1459}
1460
1461static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1462{
1463 if (d != OR_TMP0)
1464 gen_op_mov_TN_reg[ot][0][d]();
1465 else
1466 gen_op_ld_T0_A0[ot + s1->mem_index]();
1467 if (s != OR_TMP1)
1468 gen_op_mov_TN_reg[ot][1][s]();
1469 /* for zero counts, flags are not updated, so must do it dynamically */
1470 if (s1->cc_op != CC_OP_DYNAMIC)
1471 gen_op_set_cc_op(s1->cc_op);
1472
1473 if (d != OR_TMP0)
1474 gen_op_shift_T0_T1_cc[ot][op]();
1475 else
1476 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1477 if (d != OR_TMP0)
1478 gen_op_mov_reg_T0[ot][d]();
1479 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1480}
1481
/* Shift/rotate by an immediate count 'c': load the count into T1 and
   reuse the generic variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1488
/* Decode the memory form of a modrm byte (plus optional SIB and
   displacement bytes, consumed from s->pc) and emit code that leaves
   the effective address in A0, including any segment base. The out
   parameters are fixed: *reg_ptr = OR_A0, *offset_ptr = 0. Must only
   be called for mod != 3. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    /* an explicit segment prefix always forces the segment base add */
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 means a SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, disp32 only; in 64-bit mode
                   without SIB this is RIP-relative addressing */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* relative to the end of the instruction
                       (rip_offset accounts for a trailing immediate) */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* no base register: A0 = disp */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
            /* A0 += index << scale */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only, no registers */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* fixed 16-bit base/index register pairs per rm encoding */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1678
1679static void gen_nop_modrm(DisasContext *s, int modrm)
1680{
1681 int mod, rm, base, code;
1682
1683 mod = (modrm >> 6) & 3;
1684 if (mod == 3)
1685 return;
1686 rm = modrm & 7;
1687
1688 if (s->aflag) {
1689
1690 base = rm;
1691
1692 if (base == 4) {
1693 code = ldub_code(s->pc++);
1694 base = (code & 7);
1695 }
1696
1697 switch (mod) {
1698 case 0:
1699 if (base == 5) {
1700 s->pc += 4;
1701 }
1702 break;
1703 case 1:
1704 s->pc++;
1705 break;
1706 default:
1707 case 2:
1708 s->pc += 4;
1709 break;
1710 }
1711 } else {
1712 switch (mod) {
1713 case 0:
1714 if (rm == 6) {
1715 s->pc += 2;
1716 }
1717 break;
1718 case 1:
1719 s->pc++;
1720 break;
1721 default:
1722 case 2:
1723 s->pc += 2;
1724 break;
1725 }
1726 }
1727}
1728
1729/* used for LEA and MOV AX, mem */
1730static void gen_add_A0_ds_seg(DisasContext *s)
1731{
1732 int override, must_add_seg;
1733 must_add_seg = s->addseg;
1734 override = R_DS;
1735 if (s->override >= 0) {
1736 override = s->override;
1737 must_add_seg = 1;
1738 } else {
1739 override = R_DS;
1740 }
1741 if (must_add_seg) {
1742#ifdef TARGET_X86_64
1743 if (CODE64(s)) {
1744 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1745 } else
1746#endif
1747 {
1748 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1749 }
1750 }
1751}
1752
1753/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1754 OR_TMP0 */
1755static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1756{
1757 int mod, rm, opreg, disp;
1758
1759 mod = (modrm >> 6) & 3;
1760 rm = (modrm & 7) | REX_B(s);
1761 if (mod == 3) {
1762 if (is_store) {
1763 if (reg != OR_TMP0)
1764 gen_op_mov_TN_reg[ot][0][reg]();
1765 gen_op_mov_reg_T0[ot][rm]();
1766 } else {
1767 gen_op_mov_TN_reg[ot][0][rm]();
1768 if (reg != OR_TMP0)
1769 gen_op_mov_reg_T0[ot][reg]();
1770 }
1771 } else {
1772 gen_lea_modrm(s, modrm, &opreg, &disp);
1773 if (is_store) {
1774 if (reg != OR_TMP0)
1775 gen_op_mov_TN_reg[ot][0][reg]();
1776 gen_op_st_T0_A0[ot + s->mem_index]();
1777 } else {
1778 gen_op_ld_T0_A0[ot + s->mem_index]();
1779 if (reg != OR_TMP0)
1780 gen_op_mov_reg_T0[ot][reg]();
1781 }
1782 }
1783}
1784
1785static inline uint32_t insn_get(DisasContext *s, int ot)
1786{
1787 uint32_t ret;
1788
1789 switch(ot) {
1790 case OT_BYTE:
1791 ret = ldub_code(s->pc);
1792 s->pc++;
1793 break;
1794 case OT_WORD:
1795 ret = lduw_code(s->pc);
1796 s->pc += 2;
1797 break;
1798 default:
1799 case OT_LONG:
1800 ret = ldl_code(s->pc);
1801 s->pc += 4;
1802 break;
1803 }
1804 return ret;
1805}
1806
1807static inline int insn_const_size(unsigned int ot)
1808{
1809 if (ot <= OT_LONG)
1810 return 1 << ot;
1811 else
1812 return 4;
1813}
1814
/* Emit a jump to 'eip' using direct TB chaining slot 'tb_num' (0 or 1)
   when the target lies on a page this TB already covers; otherwise
   fall back to a plain end-of-block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) to the main loop so the chaining slot
           can be patched */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1839
/* Emit a conditional jump 'b' (Jcc condition byte, low bit = inverted)
   taking the branch to 'val' and falling through to 'next_eip'. When
   TB chaining is allowed (s->jmp_opt) a fast flag test plus goto_tb
   pair is used; otherwise a slow setcc + explicit branch is emitted. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only Z and S can be tested directly on the
               stored result; "% 4" extracts the operand size index */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast test available: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* inverted condition: swap taken/fallthrough targets instead
           of negating the test */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: no TB chaining allowed */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1971
/* Compute condition 'b' (SETcc/CMOVcc condition byte, low bit =
   inverted) into T0 as 0/1, using the fast per-cc_op helpers when the
   condition can be derived directly from the stored result. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read from the stored result directly;
           "% 4" extracts the operand size index */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: materialize the flags and evaluate */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2038
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS. In protected mode the
   helper performs the full descriptor load (and may fault, hence the
   eip update first); in real/vm86 mode the selector is loaded
   directly. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        /* SS load still inhibits interrupts for one instruction */
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2061
2062static inline void gen_stack_update(DisasContext *s, int addend)
2063{
2064#ifdef TARGET_X86_64
2065 if (CODE64(s)) {
2066 if (addend == 8)
2067 gen_op_addq_ESP_8();
2068 else
2069 gen_op_addq_ESP_im(addend);
2070 } else
2071#endif
2072 if (s->ss32) {
2073 if (addend == 2)
2074 gen_op_addl_ESP_2();
2075 else if (addend == 4)
2076 gen_op_addl_ESP_4();
2077 else
2078 gen_op_addl_ESP_im(addend);
2079 } else {
2080 if (addend == 2)
2081 gen_op_addw_ESP_2();
2082 else if (addend == 4)
2083 gen_op_addw_ESP_4();
2084 else
2085 gen_op_addw_ESP_im(addend);
2086 }
2087}
2088
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64-bit mode: push 8 bytes (dflag set) or 2 bytes */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* save the new offset in T1 before adding the SS base
                   so ESP can be updated with the un-based value */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            /* 16-bit stack: wrap the offset, then add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2128
2129/* generate a push. It depends on ss32, addseg and dflag */
2130/* slower version for T1, only used for call Ev */
2131static void gen_push_T1(DisasContext *s)
2132{
2133#ifdef TARGET_X86_64
2134 if (CODE64(s)) {
2135 gen_op_movq_A0_reg[R_ESP]();
2136 if (s->dflag) {
2137 gen_op_subq_A0_8();
2138 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2139 } else {
2140 gen_op_subq_A0_2();
2141 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2142 }
2143 gen_op_movq_ESP_A0();
2144 } else
2145#endif
2146 {
2147 gen_op_movl_A0_reg[R_ESP]();
2148 if (!s->dflag)
2149 gen_op_subl_A0_2();
2150 else
2151 gen_op_subl_A0_4();
2152 if (s->ss32) {
2153 if (s->addseg) {
2154 gen_op_addl_A0_SS();
2155 }
2156 } else {
2157 gen_op_andl_A0_ffff();
2158 gen_op_addl_A0_SS();
2159 }
2160 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2161
2162 if (s->ss32 && !s->addseg)
2163 gen_op_movl_ESP_A0();
2164 else
2165 gen_stack_update(s, (-2) << s->dflag);
2166 }
2167}
2168
2169/* two step pop is necessary for precise exceptions */
2170static void gen_pop_T0(DisasContext *s)
2171{
2172#ifdef TARGET_X86_64
2173 if (CODE64(s)) {
2174 gen_op_movq_A0_reg[R_ESP]();
2175 gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
2176 } else
2177#endif
2178 {
2179 gen_op_movl_A0_reg[R_ESP]();
2180 if (s->ss32) {
2181 if (s->addseg)
2182 gen_op_addl_A0_SS();
2183 } else {
2184 gen_op_andl_A0_ffff();
2185 gen_op_addl_A0_SS();
2186 }
2187 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
2188 }
2189}
2190
2191static void gen_pop_update(DisasContext *s)
2192{
2193#ifdef TARGET_X86_64
2194 if (CODE64(s) && s->dflag) {
2195 gen_stack_update(s, 8);
2196 } else
2197#endif
2198 {
2199 gen_stack_update(s, 2 << s->dflag);
2200 }
2201}
2202
/* Compute the current stack address into A0 (and the raw offset into
   T1), honoring 16-bit wrap and the SS base when addseg is set. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2212
/* PUSHA/PUSHAD: store the 8 general registers (EDI first in memory,
   EAX last) below the current stack pointer, then update eSP.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    /* reserve 8 slots of 2 or 4 bytes each */
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();  /* T1 = final eSP value */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* store in reverse register order: reg 7 (EDI) at the lowest
           address, reg 0 (EAX) at the highest */
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2231
/* POPA/POPAD: reload the general registers pushed by PUSHA, skipping
   the saved eSP slot, then update eSP.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    /* T1 = eSP after popping all 8 slots */
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2253
/* ENTER: push EBP, optionally copy 'level' (masked to 0..31) display
   frame pointers, set EBP to the new frame, and allocate 'esp_addend'
   bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();  /* T1 = new frame pointer */

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* copy the nested-procedure display entries */
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        /* RSP = frame - locals - display area */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();  /* T1 = new frame pointer (offset) */
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* copy the nested-procedure display entries */
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        /* eSP = frame - locals - display area */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2301
/* Raise exception 'trapno' at 'cur_eip': flags and eip are committed
   first so the exception is precise, then translation stops. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2310
/* an interrupt is different from an exception because of the
   priviledge checks. The instruction length (next_eip - cur_eip) is
   passed so the handler can push the return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2322
/* Stop translation and enter the debugger at 'cur_eip' (flags and eip
   committed first). */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2331
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the IRQ-inhibit window (after MOV SS/STI) ends here */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        /* gdbstub single step */
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: raise the single-step trace exception */
        gen_op_single_step();
    } else {
        /* normal exit, no TB chaining (T0 = 0) */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2351
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        /* flags must be resolved before chaining to another TB */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* chaining not allowed: plain end of block */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2371
/* Jump to 'eip' using TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2376
/* Load a target_ulong immediate into T0, using the shorter 32-bit
   move when the value fits in a sign-extended 32-bit immediate. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2389
/* Load a target_ulong immediate into T1 (same logic as
   gen_movtl_T0_im). */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2402
/* Add the immediate 'val' to A0 with the address width of the current
   code size (64-bit add in long mode, 32-bit otherwise). */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2412
/* 64-bit (q) and 128-bit (o) env<->memory transfer ops used for
   MMX/SSE operands, indexed by s->mem_index >> 2 (raw / kernel /
   user address space variants). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2444
/* sentinel in sse_op_table1: the opcode is decoded by hand in gen_sse() */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* table entry pair: MMX op and its SSE (xmm) counterpart */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* table entry quad for float ops: ps, pd, ss, sd variants */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2450
/* Main 0F-prefixed SSE/MMX dispatch table, indexed by
   [second opcode byte][prefix: 0 = none, 1 = 66, 2 = F3, 3 = F2].
   SSE_SPECIAL entries are decoded by hand in gen_sse(); NULL entries
   are illegal. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* cmpps family; imm8 selects via sse_op_table4 */
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2573
/* Shift-by-immediate group (opcodes 0F 71/72/73), indexed by
   [8 * size-group (0 = word, 1 = dword, 2 = qword) + modrm reg field],
   each entry a {mmx, xmm} pair. Unlisted slots are NULL (illegal). */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm }, /* xmm-only byte shift */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm }, /* xmm-only byte shift */
};
2586
/* Scalar int<->float conversions in rows of four:
   {ss, sd, 64-bit ss, 64-bit sd} for cvtsi2*, cvtt*2si and cvt*2si.
   The 64-bit variants exist only on TARGET_X86_64 (NULL otherwise). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2603
/* CMPPS/CMPPD/CMPSS/CMPSD comparison predicates, indexed by the imm8
   predicate value (eq, lt, le, unord, neq, nlt, nle, ord); each row
   holds the {ps, pd, ss, sd} variants. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2614
/* Decode and translate one MMX/SSE/SSE2/SSE3 instruction.
 *
 * 'b' is the opcode byte that followed the 0x0f escape (the caller may
 * have set high bits; they are masked off here), 'pc_start' is the guest
 * address of the instruction start (used for exception reporting) and
 * 'rex_r' is the pre-shifted REX.R extension for the ModRM reg field
 * (0 outside long mode).  Raises #NM on CR0.TS, #UD on CR0.EM, on a
 * missing OSFXSR for XMM forms, or on any undecodable encoding.
 */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* b1 selects the prefix column of sse_op_table1:
       0 = no prefix (MMX/packed single), 1 = 0x66 (packed double),
       2 = 0xf3 (scalar single), 3 = 0xf2 (scalar double). */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* 0x10..0x5f, 0xc2 and 0xc6 operate on XMM regardless of prefix;
       everything else is MMX only when unprefixed */
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* irregular encodings: fold the prefix selector into the high
           byte of b so one switch can dispatch on opcode+prefix */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            /* non-temporal hints are translated as plain 128-bit
               memory accesses */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            /* full 128-bit register loads; aligned and unaligned forms
               share the same translation */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* upper qword of the destination is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            /* full 128-bit register stores */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            /* store forms are memory-only */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* the immediate count is materialized in xmm_t0/mmx_t0 and
               passed as the second operand of the table-2 shift op */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* destination is an MMX register: no REX extension */
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* 0xf2 form: 64-bit source */
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    /* 0xf3 form: 32-bit source */
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating group, +8 the rounding group */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            /* rip_offset: an imm8 follows the ModRM bytes */
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* an imm8 follows: account for it in rip-relative addressing */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            /* three-operand form: the table entry is actually a
               GenOpFunc3 taking the imm8 as third argument */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        /* ucomis/comis (0x2e/0x2f) deposit their result directly in
           EFLAGS */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3212
3213
3214/* convert one instruction. s->is_jmp is set if the translation must
3215 be stopped. Return the next pc value */
3216static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3217{
3218 int b, prefixes, aflag, dflag;
3219 int shift, ot;
3220 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3221 target_ulong next_eip, tval;
3222 int rex_w, rex_r;
3223
3224 s->pc = pc_start;
3225 prefixes = 0;
3226 aflag = s->code32;
3227 dflag = s->code32;
3228 s->override = -1;
3229 rex_w = -1;
3230 rex_r = 0;
3231#ifdef TARGET_X86_64
3232 s->rex_x = 0;
3233 s->rex_b = 0;
3234 x86_64_hregs = 0;
3235#endif
3236 s->rip_offset = 0; /* for relative ip address */
3237
3238#ifdef VBOX
3239 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
3240 gen_update_eip(pc_start - s->cs_base);
3241#endif
3242
3243 next_byte:
3244 b = ldub_code(s->pc);
3245 s->pc++;
3246 /* check prefixes */
3247#ifdef TARGET_X86_64
3248 if (CODE64(s)) {
3249 switch (b) {
3250 case 0xf3:
3251 prefixes |= PREFIX_REPZ;
3252 goto next_byte;
3253 case 0xf2:
3254 prefixes |= PREFIX_REPNZ;
3255 goto next_byte;
3256 case 0xf0:
3257 prefixes |= PREFIX_LOCK;
3258 goto next_byte;
3259 case 0x2e:
3260 s->override = R_CS;
3261 goto next_byte;
3262 case 0x36:
3263 s->override = R_SS;
3264 goto next_byte;
3265 case 0x3e:
3266 s->override = R_DS;
3267 goto next_byte;
3268 case 0x26:
3269 s->override = R_ES;
3270 goto next_byte;
3271 case 0x64:
3272 s->override = R_FS;
3273 goto next_byte;
3274 case 0x65:
3275 s->override = R_GS;
3276 goto next_byte;
3277 case 0x66:
3278 prefixes |= PREFIX_DATA;
3279 goto next_byte;
3280 case 0x67:
3281 prefixes |= PREFIX_ADR;
3282 goto next_byte;
3283 case 0x40 ... 0x4f:
3284 /* REX prefix */
3285 rex_w = (b >> 3) & 1;
3286 rex_r = (b & 0x4) << 1;
3287 s->rex_x = (b & 0x2) << 2;
3288 REX_B(s) = (b & 0x1) << 3;
3289 x86_64_hregs = 1; /* select uniform byte register addressing */
3290 goto next_byte;
3291 }
3292 if (rex_w == 1) {
3293 /* 0x66 is ignored if rex.w is set */
3294 dflag = 2;
3295 } else {
3296 if (prefixes & PREFIX_DATA)
3297 dflag ^= 1;
3298 }
3299 if (!(prefixes & PREFIX_ADR))
3300 aflag = 2;
3301 } else
3302#endif
3303 {
3304 switch (b) {
3305 case 0xf3:
3306 prefixes |= PREFIX_REPZ;
3307 goto next_byte;
3308 case 0xf2:
3309 prefixes |= PREFIX_REPNZ;
3310 goto next_byte;
3311 case 0xf0:
3312 prefixes |= PREFIX_LOCK;
3313 goto next_byte;
3314 case 0x2e:
3315 s->override = R_CS;
3316 goto next_byte;
3317 case 0x36:
3318 s->override = R_SS;
3319 goto next_byte;
3320 case 0x3e:
3321 s->override = R_DS;
3322 goto next_byte;
3323 case 0x26:
3324 s->override = R_ES;
3325 goto next_byte;
3326 case 0x64:
3327 s->override = R_FS;
3328 goto next_byte;
3329 case 0x65:
3330 s->override = R_GS;
3331 goto next_byte;
3332 case 0x66:
3333 prefixes |= PREFIX_DATA;
3334 goto next_byte;
3335 case 0x67:
3336 prefixes |= PREFIX_ADR;
3337 goto next_byte;
3338 }
3339 if (prefixes & PREFIX_DATA)
3340 dflag ^= 1;
3341 if (prefixes & PREFIX_ADR)
3342 aflag ^= 1;
3343 }
3344
3345 s->prefix = prefixes;
3346 s->aflag = aflag;
3347 s->dflag = dflag;
3348
3349 /* lock generation */
3350 if (prefixes & PREFIX_LOCK)
3351 gen_op_lock();
3352
3353 /* now check op code */
3354 reswitch:
3355 switch(b) {
3356 case 0x0f:
3357 /**************************/
3358 /* extended op code */
3359 b = ldub_code(s->pc++) | 0x100;
3360 goto reswitch;
3361
3362 /**************************/
3363 /* arith & logic */
3364 case 0x00 ... 0x05:
3365 case 0x08 ... 0x0d:
3366 case 0x10 ... 0x15:
3367 case 0x18 ... 0x1d:
3368 case 0x20 ... 0x25:
3369 case 0x28 ... 0x2d:
3370 case 0x30 ... 0x35:
3371 case 0x38 ... 0x3d:
3372 {
3373 int op, f, val;
3374 op = (b >> 3) & 7;
3375 f = (b >> 1) & 3;
3376
3377 if ((b & 1) == 0)
3378 ot = OT_BYTE;
3379 else
3380 ot = dflag + OT_WORD;
3381
3382 switch(f) {
3383 case 0: /* OP Ev, Gv */
3384 modrm = ldub_code(s->pc++);
3385 reg = ((modrm >> 3) & 7) | rex_r;
3386 mod = (modrm >> 6) & 3;
3387 rm = (modrm & 7) | REX_B(s);
3388 if (mod != 3) {
3389 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3390 opreg = OR_TMP0;
3391 } else if (op == OP_XORL && rm == reg) {
3392 xor_zero:
3393 /* xor reg, reg optimisation */
3394 gen_op_movl_T0_0();
3395 s->cc_op = CC_OP_LOGICB + ot;
3396 gen_op_mov_reg_T0[ot][reg]();
3397 gen_op_update1_cc();
3398 break;
3399 } else {
3400 opreg = rm;
3401 }
3402 gen_op_mov_TN_reg[ot][1][reg]();
3403 gen_op(s, op, ot, opreg);
3404 break;
3405 case 1: /* OP Gv, Ev */
3406 modrm = ldub_code(s->pc++);
3407 mod = (modrm >> 6) & 3;
3408 reg = ((modrm >> 3) & 7) | rex_r;
3409 rm = (modrm & 7) | REX_B(s);
3410 if (mod != 3) {
3411 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3412 gen_op_ld_T1_A0[ot + s->mem_index]();
3413 } else if (op == OP_XORL && rm == reg) {
3414 goto xor_zero;
3415 } else {
3416 gen_op_mov_TN_reg[ot][1][rm]();
3417 }
3418 gen_op(s, op, ot, reg);
3419 break;
3420 case 2: /* OP A, Iv */
3421 val = insn_get(s, ot);
3422 gen_op_movl_T1_im(val);
3423 gen_op(s, op, ot, OR_EAX);
3424 break;
3425 }
3426 }
3427 break;
3428
3429 case 0x80: /* GRP1 */
3430 case 0x81:
3431 case 0x82:
3432 case 0x83:
3433 {
3434 int val;
3435
3436 if ((b & 1) == 0)
3437 ot = OT_BYTE;
3438 else
3439 ot = dflag + OT_WORD;
3440
3441 modrm = ldub_code(s->pc++);
3442 mod = (modrm >> 6) & 3;
3443 rm = (modrm & 7) | REX_B(s);
3444 op = (modrm >> 3) & 7;
3445
3446 if (mod != 3) {
3447 if (b == 0x83)
3448 s->rip_offset = 1;
3449 else
3450 s->rip_offset = insn_const_size(ot);
3451 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3452 opreg = OR_TMP0;
3453 } else {
3454 opreg = rm;
3455 }
3456
3457 switch(b) {
3458 default:
3459 case 0x80:
3460 case 0x81:
3461 case 0x82:
3462 val = insn_get(s, ot);
3463 break;
3464 case 0x83:
3465 val = (int8_t)insn_get(s, OT_BYTE);
3466 break;
3467 }
3468 gen_op_movl_T1_im(val);
3469 gen_op(s, op, ot, opreg);
3470 }
3471 break;
3472
3473 /**************************/
3474 /* inc, dec, and other misc arith */
3475 case 0x40 ... 0x47: /* inc Gv */
3476 ot = dflag ? OT_LONG : OT_WORD;
3477 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3478 break;
3479 case 0x48 ... 0x4f: /* dec Gv */
3480 ot = dflag ? OT_LONG : OT_WORD;
3481 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3482 break;
3483 case 0xf6: /* GRP3 */
3484 case 0xf7:
3485 if ((b & 1) == 0)
3486 ot = OT_BYTE;
3487 else
3488 ot = dflag + OT_WORD;
3489
3490 modrm = ldub_code(s->pc++);
3491 mod = (modrm >> 6) & 3;
3492 rm = (modrm & 7) | REX_B(s);
3493 op = (modrm >> 3) & 7;
3494 if (mod != 3) {
3495 if (op == 0)
3496 s->rip_offset = insn_const_size(ot);
3497 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3498 gen_op_ld_T0_A0[ot + s->mem_index]();
3499 } else {
3500 gen_op_mov_TN_reg[ot][0][rm]();
3501 }
3502
3503 switch(op) {
3504 case 0: /* test */
3505 val = insn_get(s, ot);
3506 gen_op_movl_T1_im(val);
3507 gen_op_testl_T0_T1_cc();
3508 s->cc_op = CC_OP_LOGICB + ot;
3509 break;
3510 case 2: /* not */
3511 gen_op_notl_T0();
3512 if (mod != 3) {
3513 gen_op_st_T0_A0[ot + s->mem_index]();
3514 } else {
3515 gen_op_mov_reg_T0[ot][rm]();
3516 }
3517 break;
3518 case 3: /* neg */
3519 gen_op_negl_T0();
3520 if (mod != 3) {
3521 gen_op_st_T0_A0[ot + s->mem_index]();
3522 } else {
3523 gen_op_mov_reg_T0[ot][rm]();
3524 }
3525 gen_op_update_neg_cc();
3526 s->cc_op = CC_OP_SUBB + ot;
3527 break;
3528 case 4: /* mul */
3529 switch(ot) {
3530 case OT_BYTE:
3531 gen_op_mulb_AL_T0();
3532 s->cc_op = CC_OP_MULB;
3533 break;
3534 case OT_WORD:
3535 gen_op_mulw_AX_T0();
3536 s->cc_op = CC_OP_MULW;
3537 break;
3538 default:
3539 case OT_LONG:
3540 gen_op_mull_EAX_T0();
3541 s->cc_op = CC_OP_MULL;
3542 break;
3543#ifdef TARGET_X86_64
3544 case OT_QUAD:
3545 gen_op_mulq_EAX_T0();
3546 s->cc_op = CC_OP_MULQ;
3547 break;
3548#endif
3549 }
3550 break;
3551 case 5: /* imul */
3552 switch(ot) {
3553 case OT_BYTE:
3554 gen_op_imulb_AL_T0();
3555 s->cc_op = CC_OP_MULB;
3556 break;
3557 case OT_WORD:
3558 gen_op_imulw_AX_T0();
3559 s->cc_op = CC_OP_MULW;
3560 break;
3561 default:
3562 case OT_LONG:
3563 gen_op_imull_EAX_T0();
3564 s->cc_op = CC_OP_MULL;
3565 break;
3566#ifdef TARGET_X86_64
3567 case OT_QUAD:
3568 gen_op_imulq_EAX_T0();
3569 s->cc_op = CC_OP_MULQ;
3570 break;
3571#endif
3572 }
3573 break;
3574 case 6: /* div */
3575 switch(ot) {
3576 case OT_BYTE:
3577 gen_jmp_im(pc_start - s->cs_base);
3578 gen_op_divb_AL_T0();
3579 break;
3580 case OT_WORD:
3581 gen_jmp_im(pc_start - s->cs_base);
3582 gen_op_divw_AX_T0();
3583 break;
3584 default:
3585 case OT_LONG:
3586 gen_jmp_im(pc_start - s->cs_base);
3587 gen_op_divl_EAX_T0();
3588 break;
3589#ifdef TARGET_X86_64
3590 case OT_QUAD:
3591 gen_jmp_im(pc_start - s->cs_base);
3592 gen_op_divq_EAX_T0();
3593 break;
3594#endif
3595 }
3596 break;
3597 case 7: /* idiv */
3598 switch(ot) {
3599 case OT_BYTE:
3600 gen_jmp_im(pc_start - s->cs_base);
3601 gen_op_idivb_AL_T0();
3602 break;
3603 case OT_WORD:
3604 gen_jmp_im(pc_start - s->cs_base);
3605 gen_op_idivw_AX_T0();
3606 break;
3607 default:
3608 case OT_LONG:
3609 gen_jmp_im(pc_start - s->cs_base);
3610 gen_op_idivl_EAX_T0();
3611 break;
3612#ifdef TARGET_X86_64
3613 case OT_QUAD:
3614 gen_jmp_im(pc_start - s->cs_base);
3615 gen_op_idivq_EAX_T0();
3616 break;
3617#endif
3618 }
3619 break;
3620 default:
3621 goto illegal_op;
3622 }
3623 break;
3624
3625 case 0xfe: /* GRP4 */
3626 case 0xff: /* GRP5 */
3627 if ((b & 1) == 0)
3628 ot = OT_BYTE;
3629 else
3630 ot = dflag + OT_WORD;
3631
3632 modrm = ldub_code(s->pc++);
3633 mod = (modrm >> 6) & 3;
3634 rm = (modrm & 7) | REX_B(s);
3635 op = (modrm >> 3) & 7;
3636 if (op >= 2 && b == 0xfe) {
3637 goto illegal_op;
3638 }
3639 if (CODE64(s)) {
3640 if (op == 2 || op == 4) {
3641 /* operand size for jumps is 64 bit */
3642 ot = OT_QUAD;
3643 } else if (op == 3 || op == 5) {
3644 /* for call calls, the operand is 16 or 32 bit, even
3645 in long mode */
3646 ot = dflag ? OT_LONG : OT_WORD;
3647 } else if (op == 6) {
3648 /* default push size is 64 bit */
3649 ot = dflag ? OT_QUAD : OT_WORD;
3650 }
3651 }
3652 if (mod != 3) {
3653 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3654 if (op >= 2 && op != 3 && op != 5)
3655 gen_op_ld_T0_A0[ot + s->mem_index]();
3656 } else {
3657 gen_op_mov_TN_reg[ot][0][rm]();
3658 }
3659
3660 switch(op) {
3661 case 0: /* inc Ev */
3662 if (mod != 3)
3663 opreg = OR_TMP0;
3664 else
3665 opreg = rm;
3666 gen_inc(s, ot, opreg, 1);
3667 break;
3668 case 1: /* dec Ev */
3669 if (mod != 3)
3670 opreg = OR_TMP0;
3671 else
3672 opreg = rm;
3673 gen_inc(s, ot, opreg, -1);
3674 break;
3675 case 2: /* call Ev */
3676 /* XXX: optimize if memory (no 'and' is necessary) */
3677#ifdef VBOX_WITH_CALL_RECORD
3678 if (s->record_call)
3679 gen_op_record_call();
3680#endif
3681 if (s->dflag == 0)
3682 gen_op_andl_T0_ffff();
3683 next_eip = s->pc - s->cs_base;
3684 gen_movtl_T1_im(next_eip);
3685 gen_push_T1(s);
3686 gen_op_jmp_T0();
3687 gen_eob(s);
3688 break;
3689 case 3: /* lcall Ev */
3690 gen_op_ld_T1_A0[ot + s->mem_index]();
3691 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3692 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3693 do_lcall:
3694 if (s->pe && !s->vm86) {
3695 if (s->cc_op != CC_OP_DYNAMIC)
3696 gen_op_set_cc_op(s->cc_op);
3697 gen_jmp_im(pc_start - s->cs_base);
3698 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3699 } else {
3700 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3701 }
3702 gen_eob(s);
3703 break;
3704 case 4: /* jmp Ev */
3705 if (s->dflag == 0)
3706 gen_op_andl_T0_ffff();
3707 gen_op_jmp_T0();
3708 gen_eob(s);
3709 break;
3710 case 5: /* ljmp Ev */
3711 gen_op_ld_T1_A0[ot + s->mem_index]();
3712 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3713 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3714 do_ljmp:
3715 if (s->pe && !s->vm86) {
3716 if (s->cc_op != CC_OP_DYNAMIC)
3717 gen_op_set_cc_op(s->cc_op);
3718 gen_jmp_im(pc_start - s->cs_base);
3719 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3720 } else {
3721 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3722 gen_op_movl_T0_T1();
3723 gen_op_jmp_T0();
3724 }
3725 gen_eob(s);
3726 break;
3727 case 6: /* push Ev */
3728 gen_push_T0(s);
3729 break;
3730 default:
3731 goto illegal_op;
3732 }
3733 break;
3734
3735 case 0x84: /* test Ev, Gv */
3736 case 0x85:
3737 if ((b & 1) == 0)
3738 ot = OT_BYTE;
3739 else
3740 ot = dflag + OT_WORD;
3741
3742 modrm = ldub_code(s->pc++);
3743 mod = (modrm >> 6) & 3;
3744 rm = (modrm & 7) | REX_B(s);
3745 reg = ((modrm >> 3) & 7) | rex_r;
3746
3747 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3748 gen_op_mov_TN_reg[ot][1][reg]();
3749 gen_op_testl_T0_T1_cc();
3750 s->cc_op = CC_OP_LOGICB + ot;
3751 break;
3752
3753 case 0xa8: /* test eAX, Iv */
3754 case 0xa9:
3755 if ((b & 1) == 0)
3756 ot = OT_BYTE;
3757 else
3758 ot = dflag + OT_WORD;
3759 val = insn_get(s, ot);
3760
3761 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3762 gen_op_movl_T1_im(val);
3763 gen_op_testl_T0_T1_cc();
3764 s->cc_op = CC_OP_LOGICB + ot;
3765 break;
3766
3767 case 0x98: /* CWDE/CBW */
3768#ifdef TARGET_X86_64
3769 if (dflag == 2) {
3770 gen_op_movslq_RAX_EAX();
3771 } else
3772#endif
3773 if (dflag == 1)
3774 gen_op_movswl_EAX_AX();
3775 else
3776 gen_op_movsbw_AX_AL();
3777 break;
3778 case 0x99: /* CDQ/CWD */
3779#ifdef TARGET_X86_64
3780 if (dflag == 2) {
3781 gen_op_movsqo_RDX_RAX();
3782 } else
3783#endif
3784 if (dflag == 1)
3785 gen_op_movslq_EDX_EAX();
3786 else
3787 gen_op_movswl_DX_AX();
3788 break;
3789 case 0x1af: /* imul Gv, Ev */
3790 case 0x69: /* imul Gv, Ev, I */
3791 case 0x6b:
3792 ot = dflag + OT_WORD;
3793 modrm = ldub_code(s->pc++);
3794 reg = ((modrm >> 3) & 7) | rex_r;
3795 if (b == 0x69)
3796 s->rip_offset = insn_const_size(ot);
3797 else if (b == 0x6b)
3798 s->rip_offset = 1;
3799 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3800 if (b == 0x69) {
3801 val = insn_get(s, ot);
3802 gen_op_movl_T1_im(val);
3803 } else if (b == 0x6b) {
3804 val = (int8_t)insn_get(s, OT_BYTE);
3805 gen_op_movl_T1_im(val);
3806 } else {
3807 gen_op_mov_TN_reg[ot][1][reg]();
3808 }
3809
3810#ifdef TARGET_X86_64
3811 if (ot == OT_QUAD) {
3812 gen_op_imulq_T0_T1();
3813 } else
3814#endif
3815 if (ot == OT_LONG) {
3816 gen_op_imull_T0_T1();
3817 } else {
3818 gen_op_imulw_T0_T1();
3819 }
3820 gen_op_mov_reg_T0[ot][reg]();
3821 s->cc_op = CC_OP_MULB + ot;
3822 break;
3823 case 0x1c0:
3824 case 0x1c1: /* xadd Ev, Gv */
3825 if ((b & 1) == 0)
3826 ot = OT_BYTE;
3827 else
3828 ot = dflag + OT_WORD;
3829 modrm = ldub_code(s->pc++);
3830 reg = ((modrm >> 3) & 7) | rex_r;
3831 mod = (modrm >> 6) & 3;
3832 if (mod == 3) {
3833 rm = (modrm & 7) | REX_B(s);
3834 gen_op_mov_TN_reg[ot][0][reg]();
3835 gen_op_mov_TN_reg[ot][1][rm]();
3836 gen_op_addl_T0_T1();
3837 gen_op_mov_reg_T1[ot][reg]();
3838 gen_op_mov_reg_T0[ot][rm]();
3839 } else {
3840 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3841 gen_op_mov_TN_reg[ot][0][reg]();
3842 gen_op_ld_T1_A0[ot + s->mem_index]();
3843 gen_op_addl_T0_T1();
3844 gen_op_st_T0_A0[ot + s->mem_index]();
3845 gen_op_mov_reg_T1[ot][reg]();
3846 }
3847 gen_op_update2_cc();
3848 s->cc_op = CC_OP_ADDB + ot;
3849 break;
3850 case 0x1b0:
3851 case 0x1b1: /* cmpxchg Ev, Gv */
3852 if ((b & 1) == 0)
3853 ot = OT_BYTE;
3854 else
3855 ot = dflag + OT_WORD;
3856 modrm = ldub_code(s->pc++);
3857 reg = ((modrm >> 3) & 7) | rex_r;
3858 mod = (modrm >> 6) & 3;
3859 gen_op_mov_TN_reg[ot][1][reg]();
3860 if (mod == 3) {
3861 rm = (modrm & 7) | REX_B(s);
3862 gen_op_mov_TN_reg[ot][0][rm]();
3863 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3864 gen_op_mov_reg_T0[ot][rm]();
3865 } else {
3866 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3867 gen_op_ld_T0_A0[ot + s->mem_index]();
3868 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3869 }
3870 s->cc_op = CC_OP_SUBB + ot;
3871 break;
3872 case 0x1c7: /* cmpxchg8b */
3873 modrm = ldub_code(s->pc++);
3874 mod = (modrm >> 6) & 3;
        /* CMPXCHG8B only exists with a memory operand and /1 in the
           modrm reg field (0f c7 /1); the register form and any other
           reg-field value must raise #UD.  (This is the corrected
           detection imported from upstream qemu.) */
3875 if ((mod == 3) || ((modrm & 0x38) != 0x8))
3876 goto illegal_op;
        /* flush lazily-evaluated condition codes first: the helper
           reads/updates EFLAGS (ZF result) directly */
3877 if (s->cc_op != CC_OP_DYNAMIC)
3878 gen_op_set_cc_op(s->cc_op);
3879 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3880 gen_op_cmpxchg8b();
        /* result is now materialized in EFLAGS */
3881 s->cc_op = CC_OP_EFLAGS;
3882 break;
3883
3884 /**************************/
3885 /* push/pop */
3886 case 0x50 ... 0x57: /* push */
3887 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3888 gen_push_T0(s);
3889 break;
3890 case 0x58 ... 0x5f: /* pop */
3891 if (CODE64(s)) {
3892 ot = dflag ? OT_QUAD : OT_WORD;
3893 } else {
3894 ot = dflag + OT_WORD;
3895 }
3896 gen_pop_T0(s);
3897 /* NOTE: order is important for pop %sp */
3898 gen_pop_update(s);
3899 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3900 break;
3901 case 0x60: /* pusha */
3902 if (CODE64(s))
3903 goto illegal_op;
3904 gen_pusha(s);
3905 break;
3906 case 0x61: /* popa */
3907 if (CODE64(s))
3908 goto illegal_op;
3909 gen_popa(s);
3910 break;
3911 case 0x68: /* push Iv */
3912 case 0x6a:
3913 if (CODE64(s)) {
3914 ot = dflag ? OT_QUAD : OT_WORD;
3915 } else {
3916 ot = dflag + OT_WORD;
3917 }
3918 if (b == 0x68)
3919 val = insn_get(s, ot);
3920 else
3921 val = (int8_t)insn_get(s, OT_BYTE);
3922 gen_op_movl_T0_im(val);
3923 gen_push_T0(s);
3924 break;
3925 case 0x8f: /* pop Ev */
3926 if (CODE64(s)) {
3927 ot = dflag ? OT_QUAD : OT_WORD;
3928 } else {
3929 ot = dflag + OT_WORD;
3930 }
3931 modrm = ldub_code(s->pc++);
3932 mod = (modrm >> 6) & 3;
3933 gen_pop_T0(s);
3934 if (mod == 3) {
3935 /* NOTE: order is important for pop %sp */
3936 gen_pop_update(s);
3937 rm = (modrm & 7) | REX_B(s);
3938 gen_op_mov_reg_T0[ot][rm]();
3939 } else {
3940 /* NOTE: order is important too for MMU exceptions */
3941 s->popl_esp_hack = 1 << ot;
3942 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3943 s->popl_esp_hack = 0;
3944 gen_pop_update(s);
3945 }
3946 break;
3947 case 0xc8: /* enter */
3948 {
3949 int level;
3950 val = lduw_code(s->pc);
3951 s->pc += 2;
3952 level = ldub_code(s->pc++);
3953 gen_enter(s, val, level);
3954 }
3955 break;
3956 case 0xc9: /* leave */
3957 /* XXX: exception not precise (ESP is updated before potential exception) */
3958 if (CODE64(s)) {
3959 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3960 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3961 } else if (s->ss32) {
3962 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3963 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3964 } else {
3965 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3966 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3967 }
3968 gen_pop_T0(s);
3969 if (CODE64(s)) {
3970 ot = dflag ? OT_QUAD : OT_WORD;
3971 } else {
3972 ot = dflag + OT_WORD;
3973 }
3974 gen_op_mov_reg_T0[ot][R_EBP]();
3975 gen_pop_update(s);
3976 break;
3977 case 0x06: /* push es */
3978 case 0x0e: /* push cs */
3979 case 0x16: /* push ss */
3980 case 0x1e: /* push ds */
3981 if (CODE64(s))
3982 goto illegal_op;
3983 gen_op_movl_T0_seg(b >> 3);
3984 gen_push_T0(s);
3985 break;
3986 case 0x1a0: /* push fs */
3987 case 0x1a8: /* push gs */
3988 gen_op_movl_T0_seg((b >> 3) & 7);
3989 gen_push_T0(s);
3990 break;
3991 case 0x07: /* pop es */
3992 case 0x17: /* pop ss */
3993 case 0x1f: /* pop ds */
        /* pushing/popping ES/SS/DS is invalid in 64-bit mode */
3994 if (CODE64(s))
3995 goto illegal_op;
        /* segment register index is encoded in bits 5:3 of the opcode */
3996 reg = b >> 3;
3997 gen_pop_T0(s);
        /* may raise #GP/#SS in protected mode, hence the eip argument */
3998 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3999 gen_pop_update(s);
4000 if (reg == R_SS) {
4001 /* if reg == SS, inhibit interrupts/trace. */
4002 /* If several instructions disable interrupts, only the
4003 _first_ does it */
4004 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4005 gen_op_set_inhibit_irq();
4006 s->tf = 0;
4007 }
        /* gen_movl_seg_T0() may have ended the TB (protected-mode
           segment loads can change CPU state) -> close the block */
4008 if (s->is_jmp) {
4009 gen_jmp_im(s->pc - s->cs_base);
4010 gen_eob(s);
4011 }
4012 break;
4013 case 0x1a1: /* pop fs */
4014 case 0x1a9: /* pop gs */
4015 gen_pop_T0(s);
4016 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4017 gen_pop_update(s);
4018 if (s->is_jmp) {
4019 gen_jmp_im(s->pc - s->cs_base);
4020 gen_eob(s);
4021 }
4022 break;
4023
4024 /**************************/
4025 /* mov */
4026 case 0x88:
4027 case 0x89: /* mov Gv, Ev */
4028 if ((b & 1) == 0)
4029 ot = OT_BYTE;
4030 else
4031 ot = dflag + OT_WORD;
4032 modrm = ldub_code(s->pc++);
4033 reg = ((modrm >> 3) & 7) | rex_r;
4034
4035 /* generate a generic store */
4036 gen_ldst_modrm(s, modrm, ot, reg, 1);
4037 break;
4038 case 0xc6:
4039 case 0xc7: /* mov Ev, Iv */
4040 if ((b & 1) == 0)
4041 ot = OT_BYTE;
4042 else
4043 ot = dflag + OT_WORD;
4044 modrm = ldub_code(s->pc++);
4045 mod = (modrm >> 6) & 3;
4046 if (mod != 3) {
4047 s->rip_offset = insn_const_size(ot);
4048 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4049 }
4050 val = insn_get(s, ot);
4051 gen_op_movl_T0_im(val);
4052 if (mod != 3)
4053 gen_op_st_T0_A0[ot + s->mem_index]();
4054 else
4055 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
4056 break;
4057 case 0x8a:
4058 case 0x8b: /* mov Ev, Gv */
4059 if ((b & 1) == 0)
4060 ot = OT_BYTE;
4061 else
4062 ot = OT_WORD + dflag;
4063 modrm = ldub_code(s->pc++);
4064 reg = ((modrm >> 3) & 7) | rex_r;
4065
4066 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4067 gen_op_mov_reg_T0[ot][reg]();
4068 break;
4069 case 0x8e: /* mov seg, Gv */
4070 modrm = ldub_code(s->pc++);
4071 reg = (modrm >> 3) & 7;
4072 if (reg >= 6 || reg == R_CS)
4073 goto illegal_op;
4074 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4075 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4076 if (reg == R_SS) {
4077 /* if reg == SS, inhibit interrupts/trace */
4078 /* If several instructions disable interrupts, only the
4079 _first_ does it */
4080 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4081 gen_op_set_inhibit_irq();
4082 s->tf = 0;
4083 }
4084 if (s->is_jmp) {
4085 gen_jmp_im(s->pc - s->cs_base);
4086 gen_eob(s);
4087 }
4088 break;
4089 case 0x8c: /* mov Gv, seg */
4090 modrm = ldub_code(s->pc++);
4091 reg = (modrm >> 3) & 7;
4092 mod = (modrm >> 6) & 3;
4093 if (reg >= 6)
4094 goto illegal_op;
4095 gen_op_movl_T0_seg(reg);
4096 if (mod == 3)
4097 ot = OT_WORD + dflag;
4098 else
4099 ot = OT_WORD;
4100 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4101 break;
4102
4103 case 0x1b6: /* movzbS Gv, Eb */
4104 case 0x1b7: /* movzwS Gv, Eb */
4105 case 0x1be: /* movsbS Gv, Eb */
4106 case 0x1bf: /* movswS Gv, Eb */
4107 {
4108 int d_ot;
4109 /* d_ot is the size of destination */
4110 d_ot = dflag + OT_WORD;
4111 /* ot is the size of source */
4112 ot = (b & 1) + OT_BYTE;
4113 modrm = ldub_code(s->pc++);
4114 reg = ((modrm >> 3) & 7) | rex_r;
4115 mod = (modrm >> 6) & 3;
4116 rm = (modrm & 7) | REX_B(s);
4117
4118 if (mod == 3) {
4119 gen_op_mov_TN_reg[ot][0][rm]();
4120 switch(ot | (b & 8)) {
4121 case OT_BYTE:
4122 gen_op_movzbl_T0_T0();
4123 break;
4124 case OT_BYTE | 8:
4125 gen_op_movsbl_T0_T0();
4126 break;
4127 case OT_WORD:
4128 gen_op_movzwl_T0_T0();
4129 break;
4130 default:
4131 case OT_WORD | 8:
4132 gen_op_movswl_T0_T0();
4133 break;
4134 }
4135 gen_op_mov_reg_T0[d_ot][reg]();
4136 } else {
4137 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4138 if (b & 8) {
4139 gen_op_lds_T0_A0[ot + s->mem_index]();
4140 } else {
4141 gen_op_ldu_T0_A0[ot + s->mem_index]();
4142 }
4143 gen_op_mov_reg_T0[d_ot][reg]();
4144 }
4145 }
4146 break;
4147
4148 case 0x8d: /* lea */
4149 ot = dflag + OT_WORD;
4150 modrm = ldub_code(s->pc++);
4151 mod = (modrm >> 6) & 3;
        /* LEA with a register operand is undefined -> #UD */
4152 if (mod == 3)
4153 goto illegal_op;
4154 reg = ((modrm >> 3) & 7) | rex_r;
4155 /* we must ensure that no segment is added */
        /* LEA computes the effective address only: temporarily cancel
           any segment override and the addseg base addition, compute
           the address into A0, then restore the addseg setting */
4156 s->override = -1;
4157 val = s->addseg;
4158 s->addseg = 0;
4159 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4160 s->addseg = val;
4161 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4162 break;
4163
4164 case 0xa0: /* mov EAX, Ov */
4165 case 0xa1:
4166 case 0xa2: /* mov Ov, EAX */
4167 case 0xa3:
4168 {
4169 target_ulong offset_addr;
4170
4171 if ((b & 1) == 0)
4172 ot = OT_BYTE;
4173 else
4174 ot = dflag + OT_WORD;
4175#ifdef TARGET_X86_64
4176 if (s->aflag == 2) {
4177 offset_addr = ldq_code(s->pc);
4178 s->pc += 8;
4179 if (offset_addr == (int32_t)offset_addr)
4180 gen_op_movq_A0_im(offset_addr);
4181 else
4182 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4183 } else
4184#endif
4185 {
4186 if (s->aflag) {
4187 offset_addr = insn_get(s, OT_LONG);
4188 } else {
4189 offset_addr = insn_get(s, OT_WORD);
4190 }
4191 gen_op_movl_A0_im(offset_addr);
4192 }
4193 gen_add_A0_ds_seg(s);
4194 if ((b & 2) == 0) {
4195 gen_op_ld_T0_A0[ot + s->mem_index]();
4196 gen_op_mov_reg_T0[ot][R_EAX]();
4197 } else {
4198 gen_op_mov_TN_reg[ot][0][R_EAX]();
4199 gen_op_st_T0_A0[ot + s->mem_index]();
4200 }
4201 }
4202 break;
4203 case 0xd7: /* xlat */
        /* AL = [seg:(rBX + AL)] -- table lookup byte load */
4204#ifdef TARGET_X86_64
4205 if (s->aflag == 2) {
4206 gen_op_movq_A0_reg[R_EBX]();
4207 gen_op_addq_A0_AL();
4208 } else
4209#endif
4210 {
4211 gen_op_movl_A0_reg[R_EBX]();
4212 gen_op_addl_A0_AL();
                /* 16-bit address size: wrap the effective address */
4213 if (s->aflag == 0)
4214 gen_op_andl_A0_ffff();
4215 }
        /* default segment is DS (honours segment overrides) */
4216 gen_add_A0_ds_seg(s);
4217 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4218 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4219 break;
4220 case 0xb0 ... 0xb7: /* mov R, Ib */
4221 val = insn_get(s, OT_BYTE);
4222 gen_op_movl_T0_im(val);
4223 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4224 break;
4225 case 0xb8 ... 0xbf: /* mov R, Iv */
4226#ifdef TARGET_X86_64
4227 if (dflag == 2) {
4228 uint64_t tmp;
4229 /* 64 bit case */
4230 tmp = ldq_code(s->pc);
4231 s->pc += 8;
4232 reg = (b & 7) | REX_B(s);
4233 gen_movtl_T0_im(tmp);
4234 gen_op_mov_reg_T0[OT_QUAD][reg]();
4235 } else
4236#endif
4237 {
4238 ot = dflag ? OT_LONG : OT_WORD;
4239 val = insn_get(s, ot);
4240 reg = (b & 7) | REX_B(s);
4241 gen_op_movl_T0_im(val);
4242 gen_op_mov_reg_T0[ot][reg]();
4243 }
4244 break;
4245
4246 case 0x91 ... 0x97: /* xchg R, EAX */
4247 ot = dflag + OT_WORD;
4248 reg = (b & 7) | REX_B(s);
4249 rm = R_EAX;
        /* share the register-register exchange code below */
4250 goto do_xchg_reg;
4251 case 0x86:
4252 case 0x87: /* xchg Ev, Gv */
4253 if ((b & 1) == 0)
4254 ot = OT_BYTE;
4255 else
4256 ot = dflag + OT_WORD;
4257 modrm = ldub_code(s->pc++);
4258 reg = ((modrm >> 3) & 7) | rex_r;
4259 mod = (modrm >> 6) & 3;
4260 if (mod == 3) {
4261 rm = (modrm & 7) | REX_B(s);
4262 do_xchg_reg:
4263 gen_op_mov_TN_reg[ot][0][reg]();
4264 gen_op_mov_TN_reg[ot][1][rm]();
4265 gen_op_mov_reg_T0[ot][rm]();
4266 gen_op_mov_reg_T1[ot][reg]();
4267 } else {
4268 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4269 gen_op_mov_TN_reg[ot][0][reg]();
4270 /* for xchg, lock is implicit */
                /* XCHG with memory is always locked on real hardware.
                   When a LOCK prefix was present the generic prefix
                   handling already brackets the instruction with
                   lock/unlock, so emit them here only when it was NOT
                   given -- hence the inverted-looking condition. */
4271 if (!(prefixes & PREFIX_LOCK))
4272 gen_op_lock();
4273 gen_op_ld_T1_A0[ot + s->mem_index]();
4274 gen_op_st_T0_A0[ot + s->mem_index]();
4275 if (!(prefixes & PREFIX_LOCK))
4276 gen_op_unlock();
4277 gen_op_mov_reg_T1[ot][reg]();
4278 }
4279 break;
4280 case 0xc4: /* les Gv */
4281 if (CODE64(s))
4282 goto illegal_op;
4283 op = R_ES;
4284 goto do_lxx;
4285 case 0xc5: /* lds Gv */
4286 if (CODE64(s))
4287 goto illegal_op;
4288 op = R_DS;
4289 goto do_lxx;
4290 case 0x1b2: /* lss Gv */
4291 op = R_SS;
4292 goto do_lxx;
4293 case 0x1b4: /* lfs Gv */
4294 op = R_FS;
4295 goto do_lxx;
4296 case 0x1b5: /* lgs Gv */
4297 op = R_GS;
4298 do_lxx:
4299 ot = dflag ? OT_LONG : OT_WORD;
4300 modrm = ldub_code(s->pc++);
4301 reg = ((modrm >> 3) & 7) | rex_r;
4302 mod = (modrm >> 6) & 3;
4303 if (mod == 3)
4304 goto illegal_op;
4305 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4306 gen_op_ld_T1_A0[ot + s->mem_index]();
4307 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4308 /* load the segment first to handle exceptions properly */
4309 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4310 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4311 /* then put the data */
4312 gen_op_mov_reg_T1[ot][reg]();
4313 if (s->is_jmp) {
4314 gen_jmp_im(s->pc - s->cs_base);
4315 gen_eob(s);
4316 }
4317 break;
4318
4319 /************************/
4320 /* shifts */
4321 case 0xc0:
4322 case 0xc1:
4323 /* shift Ev,Ib */
4324 shift = 2;
4325 grp2:
4326 {
4327 if ((b & 1) == 0)
4328 ot = OT_BYTE;
4329 else
4330 ot = dflag + OT_WORD;
4331
4332 modrm = ldub_code(s->pc++);
4333 mod = (modrm >> 6) & 3;
4334 op = (modrm >> 3) & 7;
4335
4336 if (mod != 3) {
4337 if (shift == 2) {
4338 s->rip_offset = 1;
4339 }
4340 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4341 opreg = OR_TMP0;
4342 } else {
4343 opreg = (modrm & 7) | REX_B(s);
4344 }
4345
4346 /* simpler op */
4347 if (shift == 0) {
4348 gen_shift(s, op, ot, opreg, OR_ECX);
4349 } else {
4350 if (shift == 2) {
4351 shift = ldub_code(s->pc++);
4352 }
4353 gen_shifti(s, op, ot, opreg, shift);
4354 }
4355 }
4356 break;
4357 case 0xd0:
4358 case 0xd1:
4359 /* shift Ev,1 */
4360 shift = 1;
4361 goto grp2;
4362 case 0xd2:
4363 case 0xd3:
4364 /* shift Ev,cl */
4365 shift = 0;
4366 goto grp2;
4367
4368 case 0x1a4: /* shld imm */
4369 op = 0;
4370 shift = 1;
4371 goto do_shiftd;
4372 case 0x1a5: /* shld cl */
4373 op = 0;
4374 shift = 0;
4375 goto do_shiftd;
4376 case 0x1ac: /* shrd imm */
4377 op = 1;
4378 shift = 1;
4379 goto do_shiftd;
4380 case 0x1ad: /* shrd cl */
4381 op = 1;
4382 shift = 0;
4383 do_shiftd:
4384 ot = dflag + OT_WORD;
4385 modrm = ldub_code(s->pc++);
4386 mod = (modrm >> 6) & 3;
4387 rm = (modrm & 7) | REX_B(s);
4388 reg = ((modrm >> 3) & 7) | rex_r;
4389
4390 if (mod != 3) {
4391 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4392 gen_op_ld_T0_A0[ot + s->mem_index]();
4393 } else {
4394 gen_op_mov_TN_reg[ot][0][rm]();
4395 }
4396 gen_op_mov_TN_reg[ot][1][reg]();
4397
4398 if (shift) {
4399 val = ldub_code(s->pc++);
4400 if (ot == OT_QUAD)
4401 val &= 0x3f;
4402 else
4403 val &= 0x1f;
4404 if (val) {
4405 if (mod == 3)
4406 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4407 else
4408 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4409 if (op == 0 && ot != OT_WORD)
4410 s->cc_op = CC_OP_SHLB + ot;
4411 else
4412 s->cc_op = CC_OP_SARB + ot;
4413 }
4414 } else {
4415 if (s->cc_op != CC_OP_DYNAMIC)
4416 gen_op_set_cc_op(s->cc_op);
4417 if (mod == 3)
4418 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4419 else
4420 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4421 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4422 }
4423 if (mod == 3) {
4424 gen_op_mov_reg_T0[ot][rm]();
4425 }
4426 break;
4427
4428 /************************/
4429 /* floats */
4430 case 0xd8 ... 0xdf:
4431 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4432 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4433 /* XXX: what to do if illegal op ? */
4434 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4435 break;
4436 }
4437 modrm = ldub_code(s->pc++);
4438 mod = (modrm >> 6) & 3;
4439 rm = modrm & 7;
4440 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4441 if (mod != 3) {
4442 /* memory op */
4443 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4444 switch(op) {
4445 case 0x00 ... 0x07: /* fxxxs */
4446 case 0x10 ... 0x17: /* fixxxl */
4447 case 0x20 ... 0x27: /* fxxxl */
4448 case 0x30 ... 0x37: /* fixxx */
4449 {
4450 int op1;
4451 op1 = op & 7;
4452
4453 switch(op >> 4) {
4454 case 0:
4455 gen_op_flds_FT0_A0();
4456 break;
4457 case 1:
4458 gen_op_fildl_FT0_A0();
4459 break;
4460 case 2:
4461 gen_op_fldl_FT0_A0();
4462 break;
4463 case 3:
4464 default:
4465 gen_op_fild_FT0_A0();
4466 break;
4467 }
4468
4469 gen_op_fp_arith_ST0_FT0[op1]();
4470 if (op1 == 3) {
4471 /* fcomp needs pop */
4472 gen_op_fpop();
4473 }
4474 }
4475 break;
4476 case 0x08: /* flds */
4477 case 0x0a: /* fsts */
4478 case 0x0b: /* fstps */
4479 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4480 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4481 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4482 switch(op & 7) {
4483 case 0:
4484 switch(op >> 4) {
4485 case 0:
4486 gen_op_flds_ST0_A0();
4487 break;
4488 case 1:
4489 gen_op_fildl_ST0_A0();
4490 break;
4491 case 2:
4492 gen_op_fldl_ST0_A0();
4493 break;
4494 case 3:
4495 default:
4496 gen_op_fild_ST0_A0();
4497 break;
4498 }
4499 break;
4500 case 1:
4501 switch(op >> 4) {
4502 case 1:
4503 gen_op_fisttl_ST0_A0();
4504 break;
4505 case 2:
4506 gen_op_fisttll_ST0_A0();
4507 break;
4508 case 3:
4509 default:
4510 gen_op_fistt_ST0_A0();
4511 }
4512 gen_op_fpop();
4513 break;
4514 default:
4515 switch(op >> 4) {
4516 case 0:
4517 gen_op_fsts_ST0_A0();
4518 break;
4519 case 1:
4520 gen_op_fistl_ST0_A0();
4521 break;
4522 case 2:
4523 gen_op_fstl_ST0_A0();
4524 break;
4525 case 3:
4526 default:
4527 gen_op_fist_ST0_A0();
4528 break;
4529 }
4530 if ((op & 7) == 3)
4531 gen_op_fpop();
4532 break;
4533 }
4534 break;
4535 case 0x0c: /* fldenv mem */
4536 gen_op_fldenv_A0(s->dflag);
4537 break;
4538 case 0x0d: /* fldcw mem */
4539 gen_op_fldcw_A0();
4540 break;
4541 case 0x0e: /* fnstenv mem */
4542 gen_op_fnstenv_A0(s->dflag);
4543 break;
4544 case 0x0f: /* fnstcw mem */
4545 gen_op_fnstcw_A0();
4546 break;
4547 case 0x1d: /* fldt mem */
4548 gen_op_fldt_ST0_A0();
4549 break;
4550 case 0x1f: /* fstpt mem */
4551 gen_op_fstt_ST0_A0();
4552 gen_op_fpop();
4553 break;
4554 case 0x2c: /* frstor mem */
4555 gen_op_frstor_A0(s->dflag);
4556 break;
4557 case 0x2e: /* fnsave mem */
4558 gen_op_fnsave_A0(s->dflag);
4559 break;
4560 case 0x2f: /* fnstsw mem */
4561 gen_op_fnstsw_A0();
4562 break;
4563 case 0x3c: /* fbld */
4564 gen_op_fbld_ST0_A0();
4565 break;
4566 case 0x3e: /* fbstp */
4567 gen_op_fbst_ST0_A0();
4568 gen_op_fpop();
4569 break;
4570 case 0x3d: /* fildll */
4571 gen_op_fildll_ST0_A0();
4572 break;
4573 case 0x3f: /* fistpll */
4574 gen_op_fistll_ST0_A0();
4575 gen_op_fpop();
4576 break;
4577 default:
4578 goto illegal_op;
4579 }
4580 } else {
4581 /* register float ops */
4582 opreg = rm;
4583
4584 switch(op) {
4585 case 0x08: /* fld sti */
4586 gen_op_fpush();
4587 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4588 break;
4589 case 0x09: /* fxchg sti */
4590 case 0x29: /* fxchg4 sti, undocumented op */
4591 case 0x39: /* fxchg7 sti, undocumented op */
4592 gen_op_fxchg_ST0_STN(opreg);
4593 break;
4594 case 0x0a: /* grp d9/2 */
4595 switch(rm) {
4596 case 0: /* fnop */
4597 /* check exceptions (FreeBSD FPU probe) */
4598 if (s->cc_op != CC_OP_DYNAMIC)
4599 gen_op_set_cc_op(s->cc_op);
4600 gen_jmp_im(pc_start - s->cs_base);
4601 gen_op_fwait();
4602 break;
4603 default:
4604 goto illegal_op;
4605 }
4606 break;
4607 case 0x0c: /* grp d9/4 */
4608 switch(rm) {
4609 case 0: /* fchs */
4610 gen_op_fchs_ST0();
4611 break;
4612 case 1: /* fabs */
4613 gen_op_fabs_ST0();
4614 break;
4615 case 4: /* ftst */
4616 gen_op_fldz_FT0();
4617 gen_op_fcom_ST0_FT0();
4618 break;
4619 case 5: /* fxam */
4620 gen_op_fxam_ST0();
4621 break;
4622 default:
4623 goto illegal_op;
4624 }
4625 break;
4626 case 0x0d: /* grp d9/5 */
4627 {
4628 switch(rm) {
4629 case 0:
4630 gen_op_fpush();
4631 gen_op_fld1_ST0();
4632 break;
4633 case 1:
4634 gen_op_fpush();
4635 gen_op_fldl2t_ST0();
4636 break;
4637 case 2:
4638 gen_op_fpush();
4639 gen_op_fldl2e_ST0();
4640 break;
4641 case 3:
4642 gen_op_fpush();
4643 gen_op_fldpi_ST0();
4644 break;
4645 case 4:
4646 gen_op_fpush();
4647 gen_op_fldlg2_ST0();
4648 break;
4649 case 5:
4650 gen_op_fpush();
4651 gen_op_fldln2_ST0();
4652 break;
4653 case 6:
4654 gen_op_fpush();
4655 gen_op_fldz_ST0();
4656 break;
4657 default:
4658 goto illegal_op;
4659 }
4660 }
4661 break;
4662 case 0x0e: /* grp d9/6 */
4663 switch(rm) {
4664 case 0: /* f2xm1 */
4665 gen_op_f2xm1();
4666 break;
4667 case 1: /* fyl2x */
4668 gen_op_fyl2x();
4669 break;
4670 case 2: /* fptan */
4671 gen_op_fptan();
4672 break;
4673 case 3: /* fpatan */
4674 gen_op_fpatan();
4675 break;
4676 case 4: /* fxtract */
4677 gen_op_fxtract();
4678 break;
4679 case 5: /* fprem1 */
4680 gen_op_fprem1();
4681 break;
4682 case 6: /* fdecstp */
4683 gen_op_fdecstp();
4684 break;
4685 default:
4686 case 7: /* fincstp */
4687 gen_op_fincstp();
4688 break;
4689 }
4690 break;
4691 case 0x0f: /* grp d9/7 */
4692 switch(rm) {
4693 case 0: /* fprem */
4694 gen_op_fprem();
4695 break;
4696 case 1: /* fyl2xp1 */
4697 gen_op_fyl2xp1();
4698 break;
4699 case 2: /* fsqrt */
4700 gen_op_fsqrt();
4701 break;
4702 case 3: /* fsincos */
4703 gen_op_fsincos();
4704 break;
4705 case 5: /* fscale */
4706 gen_op_fscale();
4707 break;
4708 case 4: /* frndint */
4709 gen_op_frndint();
4710 break;
4711 case 6: /* fsin */
4712 gen_op_fsin();
4713 break;
4714 default:
4715 case 7: /* fcos */
4716 gen_op_fcos();
4717 break;
4718 }
4719 break;
4720 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4721 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4722 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4723 {
4724 int op1;
4725
4726 op1 = op & 7;
4727 if (op >= 0x20) {
4728 gen_op_fp_arith_STN_ST0[op1](opreg);
4729 if (op >= 0x30)
4730 gen_op_fpop();
4731 } else {
4732 gen_op_fmov_FT0_STN(opreg);
4733 gen_op_fp_arith_ST0_FT0[op1]();
4734 }
4735 }
4736 break;
4737 case 0x02: /* fcom */
4738 case 0x22: /* fcom2, undocumented op */
4739 gen_op_fmov_FT0_STN(opreg);
4740 gen_op_fcom_ST0_FT0();
4741 break;
4742 case 0x03: /* fcomp */
4743 case 0x23: /* fcomp3, undocumented op */
4744 case 0x32: /* fcomp5, undocumented op */
4745 gen_op_fmov_FT0_STN(opreg);
4746 gen_op_fcom_ST0_FT0();
4747 gen_op_fpop();
4748 break;
4749 case 0x15: /* da/5 */
4750 switch(rm) {
4751 case 1: /* fucompp */
4752 gen_op_fmov_FT0_STN(1);
4753 gen_op_fucom_ST0_FT0();
4754 gen_op_fpop();
4755 gen_op_fpop();
4756 break;
4757 default:
4758 goto illegal_op;
4759 }
4760 break;
4761 case 0x1c:
4762 switch(rm) {
4763 case 0: /* feni (287 only, just do nop here) */
4764 break;
4765 case 1: /* fdisi (287 only, just do nop here) */
4766 break;
4767 case 2: /* fclex */
4768 gen_op_fclex();
4769 break;
4770 case 3: /* fninit */
4771 gen_op_fninit();
4772 break;
4773 case 4: /* fsetpm (287 only, just do nop here) */
4774 break;
4775 default:
4776 goto illegal_op;
4777 }
4778 break;
4779 case 0x1d: /* fucomi */
4780 if (s->cc_op != CC_OP_DYNAMIC)
4781 gen_op_set_cc_op(s->cc_op);
4782 gen_op_fmov_FT0_STN(opreg);
4783 gen_op_fucomi_ST0_FT0();
4784 s->cc_op = CC_OP_EFLAGS;
4785 break;
4786 case 0x1e: /* fcomi */
4787 if (s->cc_op != CC_OP_DYNAMIC)
4788 gen_op_set_cc_op(s->cc_op);
4789 gen_op_fmov_FT0_STN(opreg);
4790 gen_op_fcomi_ST0_FT0();
4791 s->cc_op = CC_OP_EFLAGS;
4792 break;
4793 case 0x28: /* ffree sti */
4794 gen_op_ffree_STN(opreg);
4795 break;
4796 case 0x2a: /* fst sti */
4797 gen_op_fmov_STN_ST0(opreg);
4798 break;
4799 case 0x2b: /* fstp sti */
4800 case 0x0b: /* fstp1 sti, undocumented op */
4801 case 0x3a: /* fstp8 sti, undocumented op */
4802 case 0x3b: /* fstp9 sti, undocumented op */
4803 gen_op_fmov_STN_ST0(opreg);
4804 gen_op_fpop();
4805 break;
4806 case 0x2c: /* fucom st(i) */
4807 gen_op_fmov_FT0_STN(opreg);
4808 gen_op_fucom_ST0_FT0();
4809 break;
4810 case 0x2d: /* fucomp st(i) */
4811 gen_op_fmov_FT0_STN(opreg);
4812 gen_op_fucom_ST0_FT0();
4813 gen_op_fpop();
4814 break;
4815 case 0x33: /* de/3 */
4816 switch(rm) {
4817 case 1: /* fcompp */
4818 gen_op_fmov_FT0_STN(1);
4819 gen_op_fcom_ST0_FT0();
4820 gen_op_fpop();
4821 gen_op_fpop();
4822 break;
4823 default:
4824 goto illegal_op;
4825 }
4826 break;
4827 case 0x38: /* ffreep sti, undocumented op */
4828 gen_op_ffree_STN(opreg);
4829 gen_op_fpop();
4830 break;
4831 case 0x3c: /* df/4 */
4832 switch(rm) {
4833 case 0:
4834 gen_op_fnstsw_EAX();
4835 break;
4836 default:
4837 goto illegal_op;
4838 }
4839 break;
4840 case 0x3d: /* fucomip */
4841 if (s->cc_op != CC_OP_DYNAMIC)
4842 gen_op_set_cc_op(s->cc_op);
4843 gen_op_fmov_FT0_STN(opreg);
4844 gen_op_fucomi_ST0_FT0();
4845 gen_op_fpop();
4846 s->cc_op = CC_OP_EFLAGS;
4847 break;
4848 case 0x3e: /* fcomip */
4849 if (s->cc_op != CC_OP_DYNAMIC)
4850 gen_op_set_cc_op(s->cc_op);
4851 gen_op_fmov_FT0_STN(opreg);
4852 gen_op_fcomi_ST0_FT0();
4853 gen_op_fpop();
4854 s->cc_op = CC_OP_EFLAGS;
4855 break;
4856 case 0x10 ... 0x13: /* fcmovxx */
4857 case 0x18 ... 0x1b:
4858 {
4859 int op1;
4860 const static uint8_t fcmov_cc[8] = {
4861 (JCC_B << 1),
4862 (JCC_Z << 1),
4863 (JCC_BE << 1),
4864 (JCC_P << 1),
4865 };
4866 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4867 gen_setcc(s, op1);
4868 gen_op_fcmov_ST0_STN_T0(opreg);
4869 }
4870 break;
4871 default:
4872 goto illegal_op;
4873 }
4874 }
4875#ifdef USE_CODE_COPY
4876 s->tb->cflags |= CF_TB_FP_USED;
4877#endif
4878 break;
4879 /************************/
4880 /* string ops */
4881
4882 case 0xa4: /* movsS */
4883 case 0xa5:
4884 if ((b & 1) == 0)
4885 ot = OT_BYTE;
4886 else
4887 ot = dflag + OT_WORD;
4888
4889 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4890 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4891 } else {
4892 gen_movs(s, ot);
4893 }
4894 break;
4895
4896 case 0xaa: /* stosS */
4897 case 0xab:
4898 if ((b & 1) == 0)
4899 ot = OT_BYTE;
4900 else
4901 ot = dflag + OT_WORD;
4902
4903 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4904 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4905 } else {
4906 gen_stos(s, ot);
4907 }
4908 break;
4909 case 0xac: /* lodsS */
4910 case 0xad:
4911 if ((b & 1) == 0)
4912 ot = OT_BYTE;
4913 else
4914 ot = dflag + OT_WORD;
4915 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4916 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4917 } else {
4918 gen_lods(s, ot);
4919 }
4920 break;
4921 case 0xae: /* scasS */
4922 case 0xaf:
4923 if ((b & 1) == 0)
4924 ot = OT_BYTE;
4925 else
4926 ot = dflag + OT_WORD;
4927 if (prefixes & PREFIX_REPNZ) {
4928 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4929 } else if (prefixes & PREFIX_REPZ) {
4930 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4931 } else {
4932 gen_scas(s, ot);
4933 s->cc_op = CC_OP_SUBB + ot;
4934 }
4935 break;
4936
4937 case 0xa6: /* cmpsS */
4938 case 0xa7:
4939 if ((b & 1) == 0)
4940 ot = OT_BYTE;
4941 else
4942 ot = dflag + OT_WORD;
4943 if (prefixes & PREFIX_REPNZ) {
4944 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4945 } else if (prefixes & PREFIX_REPZ) {
4946 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4947 } else {
4948 gen_cmps(s, ot);
4949 s->cc_op = CC_OP_SUBB + ot;
4950 }
4951 break;
4952 case 0x6c: /* insS */
4953 case 0x6d:
4954 if ((b & 1) == 0)
4955 ot = OT_BYTE;
4956 else
4957 ot = dflag ? OT_LONG : OT_WORD;
4958 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4959 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4960 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4961 } else {
4962 gen_ins(s, ot);
4963 }
4964 break;
4965 case 0x6e: /* outsS */
4966 case 0x6f:
4967 if ((b & 1) == 0)
4968 ot = OT_BYTE;
4969 else
4970 ot = dflag ? OT_LONG : OT_WORD;
4971 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4972 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4973 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4974 } else {
4975 gen_outs(s, ot);
4976 }
4977 break;
4978
4979 /************************/
4980 /* port I/O */
4981 case 0xe4: /* in AL/eAX, imm8 */
4982 case 0xe5:
4983 if ((b & 1) == 0)
4984 ot = OT_BYTE;
4985 else
4986 ot = dflag ? OT_LONG : OT_WORD;
        /* port number is an 8-bit immediate */
4987 val = ldub_code(s->pc++);
4988 gen_op_movl_T0_im(val);
        /* check I/O permission bitmap / IOPL before accessing */
4989 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4990 gen_op_in[ot]();
4991 gen_op_mov_reg_T1[ot][R_EAX]();
4992 break;
4993 case 0xe6: /* out imm8, AL/eAX */
4994 case 0xe7:
4995 if ((b & 1) == 0)
4996 ot = OT_BYTE;
4997 else
4998 ot = dflag ? OT_LONG : OT_WORD;
4999 val = ldub_code(s->pc++);
5000 gen_op_movl_T0_im(val);
5001 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5002#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
        /* VBox optimization: writes to port 0x80 (the classic POST /
           I/O-delay port) are simply dropped after the permission
           check, to avoid pointless VM exits */
5003 if (val == 0x80)
5004 break;
5005#endif /* VBOX */
5006 gen_op_mov_TN_reg[ot][1][R_EAX]();
5007 gen_op_out[ot]();
5008 break;
5009 case 0xec: /* in AL/eAX, DX */
5010 case 0xed:
5011 if ((b & 1) == 0)
5012 ot = OT_BYTE;
5013 else
5014 ot = dflag ? OT_LONG : OT_WORD;
        /* port number comes from DX (16 bits only) */
5015 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5016 gen_op_andl_T0_ffff();
5017 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5018 gen_op_in[ot]();
5019 gen_op_mov_reg_T1[ot][R_EAX]();
5020 break;
5021 case 0xee: /* out DX, AL/eAX */
5022 case 0xef:
5023 if ((b & 1) == 0)
5024 ot = OT_BYTE;
5025 else
5026 ot = dflag ? OT_LONG : OT_WORD;
5027 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5028 gen_op_andl_T0_ffff();
5029 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5030 gen_op_mov_TN_reg[ot][1][R_EAX]();
5031 gen_op_out[ot]();
5032 break;
5033
5034 /************************/
5035 /* control */
5036 case 0xc2: /* ret im */
        /* NOTE(review): ldsw_code sign-extends the imm16 stack
           adjustment; harmless with 16-bit SP (wraps anyway) but
           looks wrong for 32-bit stacks with imm >= 0x8000 --
           matches upstream qemu behaviour, confirm before changing */
5037 val = ldsw_code(s->pc);
5038 s->pc += 2;
5039 gen_pop_T0(s);
        /* in 64-bit mode near RET defaults to a 64-bit operand */
5040 if (CODE64(s) && s->dflag)
5041 s->dflag = 2;
        /* discard the return address plus the immediate adjustment */
5042 gen_stack_update(s, val + (2 << s->dflag));
5043 if (s->dflag == 0)
5044 gen_op_andl_T0_ffff();
5045 gen_op_jmp_T0();
5046 gen_eob(s);
5047 break;
5048 case 0xc3: /* ret */
5049 gen_pop_T0(s);
5050 gen_pop_update(s);
5051 if (s->dflag == 0)
5052 gen_op_andl_T0_ffff();
5053 gen_op_jmp_T0();
5054 gen_eob(s);
5055 break;
5056 case 0xca: /* lret im */
5057 val = ldsw_code(s->pc);
5058 s->pc += 2;
5059 do_lret:
5060 if (s->pe && !s->vm86) {
            /* protected mode: the heavy lifting (privilege checks,
               possible stack switch) is done by a helper, so flush
               cc state and EIP first */
5061 if (s->cc_op != CC_OP_DYNAMIC)
5062 gen_op_set_cc_op(s->cc_op);
5063 gen_jmp_im(pc_start - s->cs_base);
5064 gen_op_lret_protected(s->dflag, val);
5065 } else {
            /* real/vm86 mode: pop EIP and CS inline */
5066 gen_stack_A0(s);
5067 /* pop offset */
5068 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5069 if (s->dflag == 0)
5070 gen_op_andl_T0_ffff();
5071 /* NOTE: keeping EIP updated is not a problem in case of
5072 exception */
5073 gen_op_jmp_T0();
5074 /* pop selector */
5075 gen_op_addl_A0_im(2 << s->dflag);
5076 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5077 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5078 /* add stack offset */
5079 gen_stack_update(s, val + (4 << s->dflag));
5080 }
5081 gen_eob(s);
5082 break;
5083 case 0xcb: /* lret */
5084 val = 0;
5085 goto do_lret;
5086 case 0xcf: /* iret */
5087 if (!s->pe) {
5088 /* real mode */
5089 gen_op_iret_real(s->dflag);
5090 s->cc_op = CC_OP_EFLAGS;
5091 } else if (s->vm86) {
5092#ifdef VBOX
            /* VBox change: with CR4.VME set, a 16-bit IRET is legal
               in v86 mode even at IOPL < 3 */
5093 if (s->iopl != 3 && (!s->vme || s->dflag)) {
5094#else
5095 if (s->iopl != 3) {
5096#endif
5097 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5098 } else {
5099 gen_op_iret_real(s->dflag);
5100 s->cc_op = CC_OP_EFLAGS;
5101 }
5102 } else {
            /* protected mode IRET: helper handles task/privilege
               transitions; flush cc state and pass next EIP */
5103 if (s->cc_op != CC_OP_DYNAMIC)
5104 gen_op_set_cc_op(s->cc_op);
5105 gen_jmp_im(pc_start - s->cs_base);
5106 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5107 s->cc_op = CC_OP_EFLAGS;
5108 }
5109 gen_eob(s);
5110 break;
5111 case 0xe8: /* call im */
5112 {
5113 if (dflag)
5114 tval = (int32_t)insn_get(s, OT_LONG);
5115 else
5116 tval = (int16_t)insn_get(s, OT_WORD);
5117 next_eip = s->pc - s->cs_base;
5118 tval += next_eip;
5119 if (s->dflag == 0)
5120 tval &= 0xffff;
5121 gen_movtl_T0_im(next_eip);
5122 gen_push_T0(s);
5123 gen_jmp(s, tval);
5124 }
5125 break;
5126 case 0x9a: /* lcall im */
5127 {
5128 unsigned int selector, offset;
5129
5130 if (CODE64(s))
5131 goto illegal_op;
5132 ot = dflag ? OT_LONG : OT_WORD;
5133 offset = insn_get(s, ot);
5134 selector = insn_get(s, OT_WORD);
5135
5136 gen_op_movl_T0_im(selector);
5137 gen_op_movl_T1_imu(offset);
5138 }
5139 goto do_lcall;
5140 case 0xe9: /* jmp im */
5141 if (dflag)
5142 tval = (int32_t)insn_get(s, OT_LONG);
5143 else
5144 tval = (int16_t)insn_get(s, OT_WORD);
5145 tval += s->pc - s->cs_base;
5146 if (s->dflag == 0)
5147 tval &= 0xffff;
5148 gen_jmp(s, tval);
5149 break;
5150 case 0xea: /* ljmp im */
5151 {
5152 unsigned int selector, offset;
5153
5154 if (CODE64(s))
5155 goto illegal_op;
5156 ot = dflag ? OT_LONG : OT_WORD;
5157 offset = insn_get(s, ot);
5158 selector = insn_get(s, OT_WORD);
5159
5160 gen_op_movl_T0_im(selector);
5161 gen_op_movl_T1_imu(offset);
5162 }
5163 goto do_ljmp;
5164 case 0xeb: /* jmp Jb */
5165 tval = (int8_t)insn_get(s, OT_BYTE);
5166 tval += s->pc - s->cs_base;
5167 if (s->dflag == 0)
5168 tval &= 0xffff;
5169 gen_jmp(s, tval);
5170 break;
5171 case 0x70 ... 0x7f: /* jcc Jb */
5172 tval = (int8_t)insn_get(s, OT_BYTE);
5173 goto do_jcc;
5174 case 0x180 ... 0x18f: /* jcc Jv */
5175 if (dflag) {
5176 tval = (int32_t)insn_get(s, OT_LONG);
5177 } else {
5178 tval = (int16_t)insn_get(s, OT_WORD);
5179 }
5180 do_jcc:
5181 next_eip = s->pc - s->cs_base;
5182 tval += next_eip;
5183 if (s->dflag == 0)
5184 tval &= 0xffff;
5185 gen_jcc(s, b, tval, next_eip);
5186 break;
5187
5188 case 0x190 ... 0x19f: /* setcc Gv */
5189 modrm = ldub_code(s->pc++);
5190 gen_setcc(s, b);
5191 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5192 break;
5193 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5194 ot = dflag + OT_WORD;
5195 modrm = ldub_code(s->pc++);
5196 reg = ((modrm >> 3) & 7) | rex_r;
5197 mod = (modrm >> 6) & 3;
5198 gen_setcc(s, b);
5199 if (mod != 3) {
5200 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5201 gen_op_ld_T1_A0[ot + s->mem_index]();
5202 } else {
5203 rm = (modrm & 7) | REX_B(s);
5204 gen_op_mov_TN_reg[ot][1][rm]();
5205 }
5206 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5207 break;
5208
5209 /************************/
5210 /* flags */
5211 case 0x9c: /* pushf */
5212#ifdef VBOX
5213 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5214#else
5215 if (s->vm86 && s->iopl != 3) {
5216#endif
5217 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5218 } else {
5219 if (s->cc_op != CC_OP_DYNAMIC)
5220 gen_op_set_cc_op(s->cc_op);
5221#ifdef VBOX
5222 if (s->vm86 && s->vme && s->iopl != 3)
5223 gen_op_movl_T0_eflags_vme();
5224 else
5225#endif
5226 gen_op_movl_T0_eflags();
5227 gen_push_T0(s);
5228 }
5229 break;
5230 case 0x9d: /* popf */
5231#ifdef VBOX
5232 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5233#else
5234 if (s->vm86 && s->iopl != 3) {
5235#endif
5236 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5237 } else {
5238 gen_pop_T0(s);
5239 if (s->cpl == 0) {
5240 if (s->dflag) {
5241 gen_op_movl_eflags_T0_cpl0();
5242 } else {
5243 gen_op_movw_eflags_T0_cpl0();
5244 }
5245 } else {
5246 if (s->cpl <= s->iopl) {
5247 if (s->dflag) {
5248 gen_op_movl_eflags_T0_io();
5249 } else {
5250 gen_op_movw_eflags_T0_io();
5251 }
5252 } else {
5253 if (s->dflag) {
5254 gen_op_movl_eflags_T0();
5255 } else {
5256#ifdef VBOX
5257 if (s->vm86 && s->vme)
5258 gen_op_movw_eflags_T0_vme();
5259 else
5260#endif
5261 gen_op_movw_eflags_T0();
5262 }
5263 }
5264 }
5265 gen_pop_update(s);
5266 s->cc_op = CC_OP_EFLAGS;
5267 /* abort translation because TF flag may change */
5268 gen_jmp_im(s->pc - s->cs_base);
5269 gen_eob(s);
5270 }
5271 break;
5272 case 0x9e: /* sahf */
5273 if (CODE64(s))
5274 goto illegal_op;
5275 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5276 if (s->cc_op != CC_OP_DYNAMIC)
5277 gen_op_set_cc_op(s->cc_op);
5278 gen_op_movb_eflags_T0();
5279 s->cc_op = CC_OP_EFLAGS;
5280 break;
5281 case 0x9f: /* lahf */
5282 if (CODE64(s))
5283 goto illegal_op;
5284 if (s->cc_op != CC_OP_DYNAMIC)
5285 gen_op_set_cc_op(s->cc_op);
5286 gen_op_movl_T0_eflags();
5287 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5288 break;
5289 case 0xf5: /* cmc */
5290 if (s->cc_op != CC_OP_DYNAMIC)
5291 gen_op_set_cc_op(s->cc_op);
5292 gen_op_cmc();
5293 s->cc_op = CC_OP_EFLAGS;
5294 break;
5295 case 0xf8: /* clc */
5296 if (s->cc_op != CC_OP_DYNAMIC)
5297 gen_op_set_cc_op(s->cc_op);
5298 gen_op_clc();
5299 s->cc_op = CC_OP_EFLAGS;
5300 break;
5301 case 0xf9: /* stc */
5302 if (s->cc_op != CC_OP_DYNAMIC)
5303 gen_op_set_cc_op(s->cc_op);
5304 gen_op_stc();
5305 s->cc_op = CC_OP_EFLAGS;
5306 break;
5307 case 0xfc: /* cld */
5308 gen_op_cld();
5309 break;
5310 case 0xfd: /* std */
5311 gen_op_std();
5312 break;
5313
5314 /************************/
5315 /* bit operations */
5316 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5317 ot = dflag + OT_WORD;
5318 modrm = ldub_code(s->pc++);
5319 op = (modrm >> 3) & 7;
5320 mod = (modrm >> 6) & 3;
5321 rm = (modrm & 7) | REX_B(s);
5322 if (mod != 3) {
5323 s->rip_offset = 1;
5324 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5325 gen_op_ld_T0_A0[ot + s->mem_index]();
5326 } else {
5327 gen_op_mov_TN_reg[ot][0][rm]();
5328 }
5329 /* load shift */
5330 val = ldub_code(s->pc++);
5331 gen_op_movl_T1_im(val);
5332 if (op < 4)
5333 goto illegal_op;
5334 op -= 4;
5335 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5336 s->cc_op = CC_OP_SARB + ot;
5337 if (op != 0) {
5338 if (mod != 3)
5339 gen_op_st_T0_A0[ot + s->mem_index]();
5340 else
5341 gen_op_mov_reg_T0[ot][rm]();
5342 gen_op_update_bt_cc();
5343 }
5344 break;
5345 case 0x1a3: /* bt Gv, Ev */
5346 op = 0;
5347 goto do_btx;
5348 case 0x1ab: /* bts */
5349 op = 1;
5350 goto do_btx;
5351 case 0x1b3: /* btr */
5352 op = 2;
5353 goto do_btx;
5354 case 0x1bb: /* btc */
5355 op = 3;
5356 do_btx:
5357 ot = dflag + OT_WORD;
5358 modrm = ldub_code(s->pc++);
5359 reg = ((modrm >> 3) & 7) | rex_r;
5360 mod = (modrm >> 6) & 3;
5361 rm = (modrm & 7) | REX_B(s);
5362 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5363 if (mod != 3) {
5364 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5365 /* specific case: we need to add a displacement */
5366 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5367 gen_op_ld_T0_A0[ot + s->mem_index]();
5368 } else {
5369 gen_op_mov_TN_reg[ot][0][rm]();
5370 }
5371 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5372 s->cc_op = CC_OP_SARB + ot;
5373 if (op != 0) {
5374 if (mod != 3)
5375 gen_op_st_T0_A0[ot + s->mem_index]();
5376 else
5377 gen_op_mov_reg_T0[ot][rm]();
5378 gen_op_update_bt_cc();
5379 }
5380 break;
5381 case 0x1bc: /* bsf */
5382 case 0x1bd: /* bsr */
5383 ot = dflag + OT_WORD;
5384 modrm = ldub_code(s->pc++);
5385 reg = ((modrm >> 3) & 7) | rex_r;
5386 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5387 /* NOTE: in order to handle the 0 case, we must load the
5388 result. It could be optimized with a generated jump */
5389 gen_op_mov_TN_reg[ot][1][reg]();
5390 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5391 gen_op_mov_reg_T1[ot][reg]();
5392 s->cc_op = CC_OP_LOGICB + ot;
5393 break;
5394 /************************/
5395 /* bcd */
5396 case 0x27: /* daa */
5397 if (CODE64(s))
5398 goto illegal_op;
5399 if (s->cc_op != CC_OP_DYNAMIC)
5400 gen_op_set_cc_op(s->cc_op);
5401 gen_op_daa();
5402 s->cc_op = CC_OP_EFLAGS;
5403 break;
5404 case 0x2f: /* das */
5405 if (CODE64(s))
5406 goto illegal_op;
5407 if (s->cc_op != CC_OP_DYNAMIC)
5408 gen_op_set_cc_op(s->cc_op);
5409 gen_op_das();
5410 s->cc_op = CC_OP_EFLAGS;
5411 break;
5412 case 0x37: /* aaa */
5413 if (CODE64(s))
5414 goto illegal_op;
5415 if (s->cc_op != CC_OP_DYNAMIC)
5416 gen_op_set_cc_op(s->cc_op);
5417 gen_op_aaa();
5418 s->cc_op = CC_OP_EFLAGS;
5419 break;
5420 case 0x3f: /* aas */
5421 if (CODE64(s))
5422 goto illegal_op;
5423 if (s->cc_op != CC_OP_DYNAMIC)
5424 gen_op_set_cc_op(s->cc_op);
5425 gen_op_aas();
5426 s->cc_op = CC_OP_EFLAGS;
5427 break;
5428 case 0xd4: /* aam */
5429 if (CODE64(s))
5430 goto illegal_op;
5431 val = ldub_code(s->pc++);
5432 if (val == 0) {
5433 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5434 } else {
5435 gen_op_aam(val);
5436 s->cc_op = CC_OP_LOGICB;
5437 }
5438 break;
5439 case 0xd5: /* aad */
5440 if (CODE64(s))
5441 goto illegal_op;
5442 val = ldub_code(s->pc++);
5443 gen_op_aad(val);
5444 s->cc_op = CC_OP_LOGICB;
5445 break;
5446 /************************/
5447 /* misc */
5448 case 0x90: /* nop */
5449 /* XXX: xchg + rex handling */
5450 /* XXX: correct lock test for all insn */
5451 if (prefixes & PREFIX_LOCK)
5452 goto illegal_op;
5453 break;
5454 case 0x9b: /* fwait */
5455 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5456 (HF_MP_MASK | HF_TS_MASK)) {
5457 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5458 } else {
5459 if (s->cc_op != CC_OP_DYNAMIC)
5460 gen_op_set_cc_op(s->cc_op);
5461 gen_jmp_im(pc_start - s->cs_base);
5462 gen_op_fwait();
5463 }
5464 break;
5465 case 0xcc: /* int3 */
5466#ifdef VBOX
5467 if (s->vm86 && s->iopl != 3 && !s->vme) {
5468 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5469 } else
5470#endif
5471 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5472 break;
5473 case 0xcd: /* int N */
5474 val = ldub_code(s->pc++);
5475#ifdef VBOX
5476 if (s->vm86 && s->iopl != 3 && !s->vme) {
5477#else
5478 if (s->vm86 && s->iopl != 3) {
5479#endif
5480 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5481 } else {
5482 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5483 }
5484 break;
5485 case 0xce: /* into */
5486 if (CODE64(s))
5487 goto illegal_op;
5488 if (s->cc_op != CC_OP_DYNAMIC)
5489 gen_op_set_cc_op(s->cc_op);
5490 gen_jmp_im(pc_start - s->cs_base);
5491 gen_op_into(s->pc - pc_start);
5492 break;
5493 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5494#if 1
5495 gen_debug(s, pc_start - s->cs_base);
5496#else
5497 /* start debug */
5498 tb_flush(cpu_single_env);
5499 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5500#endif
5501 break;
5502 case 0xfa: /* cli */
5503 if (!s->vm86) {
5504 if (s->cpl <= s->iopl) {
5505 gen_op_cli();
5506 } else {
5507 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5508 }
5509 } else {
5510 if (s->iopl == 3) {
5511 gen_op_cli();
5512#ifdef VBOX
5513 } else if (s->iopl != 3 && s->vme) {
5514 gen_op_cli_vme();
5515#endif
5516 } else {
5517 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5518 }
5519 }
5520 break;
5521 case 0xfb: /* sti */
5522 if (!s->vm86) {
5523 if (s->cpl <= s->iopl) {
5524 gen_sti:
5525 gen_op_sti();
5526 /* interruptions are enabled only the first insn after sti */
5527 /* If several instructions disable interrupts, only the
5528 _first_ does it */
5529 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5530 gen_op_set_inhibit_irq();
5531 /* give a chance to handle pending irqs */
5532 gen_jmp_im(s->pc - s->cs_base);
5533 gen_eob(s);
5534 } else {
5535 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5536 }
5537 } else {
5538 if (s->iopl == 3) {
5539 goto gen_sti;
5540#ifdef VBOX
5541 } else if (s->iopl != 3 && s->vme) {
5542 gen_op_sti_vme();
5543 /* give a chance to handle pending irqs */
5544 gen_jmp_im(s->pc - s->cs_base);
5545 gen_eob(s);
5546#endif
5547 } else {
5548 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5549 }
5550 }
5551 break;
5552 case 0x62: /* bound */
5553 if (CODE64(s))
5554 goto illegal_op;
5555 ot = dflag ? OT_LONG : OT_WORD;
5556 modrm = ldub_code(s->pc++);
5557 reg = (modrm >> 3) & 7;
5558 mod = (modrm >> 6) & 3;
5559 if (mod == 3)
5560 goto illegal_op;
5561 gen_op_mov_TN_reg[ot][0][reg]();
5562 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5563 gen_jmp_im(pc_start - s->cs_base);
5564 if (ot == OT_WORD)
5565 gen_op_boundw();
5566 else
5567 gen_op_boundl();
5568 break;
5569 case 0x1c8 ... 0x1cf: /* bswap reg */
5570 reg = (b & 7) | REX_B(s);
5571#ifdef TARGET_X86_64
5572 if (dflag == 2) {
5573 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5574 gen_op_bswapq_T0();
5575 gen_op_mov_reg_T0[OT_QUAD][reg]();
5576 } else
5577#endif
5578 {
5579 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5580 gen_op_bswapl_T0();
5581 gen_op_mov_reg_T0[OT_LONG][reg]();
5582 }
5583 break;
5584 case 0xd6: /* salc */
5585 if (CODE64(s))
5586 goto illegal_op;
5587 if (s->cc_op != CC_OP_DYNAMIC)
5588 gen_op_set_cc_op(s->cc_op);
5589 gen_op_salc();
5590 break;
5591 case 0xe0: /* loopnz */
5592 case 0xe1: /* loopz */
5593 if (s->cc_op != CC_OP_DYNAMIC)
5594 gen_op_set_cc_op(s->cc_op);
5595 /* FALL THRU */
5596 case 0xe2: /* loop */
5597 case 0xe3: /* jecxz */
5598 {
5599 int l1, l2;
5600
5601 tval = (int8_t)insn_get(s, OT_BYTE);
5602 next_eip = s->pc - s->cs_base;
5603 tval += next_eip;
5604 if (s->dflag == 0)
5605 tval &= 0xffff;
5606
5607 l1 = gen_new_label();
5608 l2 = gen_new_label();
5609 b &= 3;
5610 if (b == 3) {
5611 gen_op_jz_ecx[s->aflag](l1);
5612 } else {
5613 gen_op_dec_ECX[s->aflag]();
5614 if (b <= 1)
5615 gen_op_mov_T0_cc();
5616 gen_op_loop[s->aflag][b](l1);
5617 }
5618
5619 gen_jmp_im(next_eip);
5620 gen_op_jmp_label(l2);
5621 gen_set_label(l1);
5622 gen_jmp_im(tval);
5623 gen_set_label(l2);
5624 gen_eob(s);
5625 }
5626 break;
5627 case 0x130: /* wrmsr */
5628 case 0x132: /* rdmsr */
5629 if (s->cpl != 0) {
5630 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5631 } else {
5632 if (b & 2)
5633 gen_op_rdmsr();
5634 else
5635 gen_op_wrmsr();
5636 }
5637 break;
5638 case 0x131: /* rdtsc */
5639 gen_jmp_im(pc_start - s->cs_base);
5640 gen_op_rdtsc();
5641 break;
5642 case 0x134: /* sysenter */
5643 if (CODE64(s))
5644 goto illegal_op;
5645 if (!s->pe) {
5646 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5647 } else {
5648 if (s->cc_op != CC_OP_DYNAMIC) {
5649 gen_op_set_cc_op(s->cc_op);
5650 s->cc_op = CC_OP_DYNAMIC;
5651 }
5652 gen_jmp_im(pc_start - s->cs_base);
5653 gen_op_sysenter();
5654 gen_eob(s);
5655 }
5656 break;
5657 case 0x135: /* sysexit */
5658 if (CODE64(s))
5659 goto illegal_op;
5660 if (!s->pe) {
5661 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5662 } else {
5663 if (s->cc_op != CC_OP_DYNAMIC) {
5664 gen_op_set_cc_op(s->cc_op);
5665 s->cc_op = CC_OP_DYNAMIC;
5666 }
5667 gen_jmp_im(pc_start - s->cs_base);
5668 gen_op_sysexit();
5669 gen_eob(s);
5670 }
5671 break;
5672#ifdef TARGET_X86_64
5673 case 0x105: /* syscall */
5674 /* XXX: is it usable in real mode ? */
5675 if (s->cc_op != CC_OP_DYNAMIC) {
5676 gen_op_set_cc_op(s->cc_op);
5677 s->cc_op = CC_OP_DYNAMIC;
5678 }
5679 gen_jmp_im(pc_start - s->cs_base);
5680 gen_op_syscall(s->pc - pc_start);
5681 gen_eob(s);
5682 break;
5683 case 0x107: /* sysret */
5684 if (!s->pe) {
5685 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5686 } else {
5687 if (s->cc_op != CC_OP_DYNAMIC) {
5688 gen_op_set_cc_op(s->cc_op);
5689 s->cc_op = CC_OP_DYNAMIC;
5690 }
5691 gen_jmp_im(pc_start - s->cs_base);
5692 gen_op_sysret(s->dflag);
5693 /* condition codes are modified only in long mode */
5694 if (s->lma)
5695 s->cc_op = CC_OP_EFLAGS;
5696 gen_eob(s);
5697 }
5698 break;
5699#endif
5700 case 0x1a2: /* cpuid */
5701 gen_op_cpuid();
5702 break;
5703 case 0xf4: /* hlt */
5704 if (s->cpl != 0) {
5705 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5706 } else {
5707 if (s->cc_op != CC_OP_DYNAMIC)
5708 gen_op_set_cc_op(s->cc_op);
5709 gen_jmp_im(s->pc - s->cs_base);
5710 gen_op_hlt();
5711 s->is_jmp = 3;
5712 }
5713 break;
5714 case 0x100:
5715 modrm = ldub_code(s->pc++);
5716 mod = (modrm >> 6) & 3;
5717 op = (modrm >> 3) & 7;
5718 switch(op) {
5719 case 0: /* sldt */
5720 if (!s->pe || s->vm86)
5721 goto illegal_op;
5722 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5723 ot = OT_WORD;
5724 if (mod == 3)
5725 ot += s->dflag;
5726 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5727 break;
5728 case 2: /* lldt */
5729 if (!s->pe || s->vm86)
5730 goto illegal_op;
5731 if (s->cpl != 0) {
5732 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5733 } else {
5734 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5735 gen_jmp_im(pc_start - s->cs_base);
5736 gen_op_lldt_T0();
5737 }
5738 break;
5739 case 1: /* str */
5740 if (!s->pe || s->vm86)
5741 goto illegal_op;
5742 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5743 ot = OT_WORD;
5744 if (mod == 3)
5745 ot += s->dflag;
5746 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5747 break;
5748 case 3: /* ltr */
5749 if (!s->pe || s->vm86)
5750 goto illegal_op;
5751 if (s->cpl != 0) {
5752 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5753 } else {
5754 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5755 gen_jmp_im(pc_start - s->cs_base);
5756 gen_op_ltr_T0();
5757 }
5758 break;
5759 case 4: /* verr */
5760 case 5: /* verw */
5761 if (!s->pe || s->vm86)
5762 goto illegal_op;
5763 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5764 if (s->cc_op != CC_OP_DYNAMIC)
5765 gen_op_set_cc_op(s->cc_op);
5766 if (op == 4)
5767 gen_op_verr();
5768 else
5769 gen_op_verw();
5770 s->cc_op = CC_OP_EFLAGS;
5771 break;
5772 default:
5773 goto illegal_op;
5774 }
5775 break;
5776 case 0x101:
5777 modrm = ldub_code(s->pc++);
5778 mod = (modrm >> 6) & 3;
5779 op = (modrm >> 3) & 7;
5780 rm = modrm & 7;
5781 switch(op) {
5782 case 0: /* sgdt */
5783 if (mod == 3)
5784 goto illegal_op;
5785 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5786 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5787 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5788 gen_add_A0_im(s, 2);
5789 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5790 if (!s->dflag)
5791 gen_op_andl_T0_im(0xffffff);
5792 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5793 break;
5794 case 1:
5795 if (mod == 3) {
5796 switch (rm) {
5797 case 0: /* monitor */
5798 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5799 s->cpl != 0)
5800 goto illegal_op;
5801 gen_jmp_im(pc_start - s->cs_base);
5802#ifdef TARGET_X86_64
5803 if (s->aflag == 2) {
5804 gen_op_movq_A0_reg[R_EBX]();
5805 gen_op_addq_A0_AL();
5806 } else
5807#endif
5808 {
5809 gen_op_movl_A0_reg[R_EBX]();
5810 gen_op_addl_A0_AL();
5811 if (s->aflag == 0)
5812 gen_op_andl_A0_ffff();
5813 }
5814 gen_add_A0_ds_seg(s);
5815 gen_op_monitor();
5816 break;
5817 case 1: /* mwait */
5818 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5819 s->cpl != 0)
5820 goto illegal_op;
5821 if (s->cc_op != CC_OP_DYNAMIC) {
5822 gen_op_set_cc_op(s->cc_op);
5823 s->cc_op = CC_OP_DYNAMIC;
5824 }
5825 gen_jmp_im(s->pc - s->cs_base);
5826 gen_op_mwait();
5827 gen_eob(s);
5828 break;
5829 default:
5830 goto illegal_op;
5831 }
5832 } else { /* sidt */
5833 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5834 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5835 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5836 gen_add_A0_im(s, 2);
5837 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5838 if (!s->dflag)
5839 gen_op_andl_T0_im(0xffffff);
5840 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5841 }
5842 break;
5843 case 2: /* lgdt */
5844 case 3: /* lidt */
5845 if (mod == 3)
5846 goto illegal_op;
5847 if (s->cpl != 0) {
5848 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5849 } else {
5850 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5851 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5852 gen_add_A0_im(s, 2);
5853 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5854 if (!s->dflag)
5855 gen_op_andl_T0_im(0xffffff);
5856 if (op == 2) {
5857 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5858 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5859 } else {
5860 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5861 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5862 }
5863 }
5864 break;
5865 case 4: /* smsw */
5866 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5867 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5868 break;
5869 case 6: /* lmsw */
5870 if (s->cpl != 0) {
5871 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5872 } else {
5873 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5874 gen_op_lmsw_T0();
5875 gen_jmp_im(s->pc - s->cs_base);
5876 gen_eob(s);
5877 }
5878 break;
5879 case 7: /* invlpg */
5880 if (s->cpl != 0) {
5881 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5882 } else {
5883 if (mod == 3) {
5884#ifdef TARGET_X86_64
5885 if (CODE64(s) && rm == 0) {
5886 /* swapgs */
5887 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5888 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5889 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5890 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5891 } else
5892#endif
5893 {
5894 goto illegal_op;
5895 }
5896 } else {
5897 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5898 gen_op_invlpg_A0();
5899 gen_jmp_im(s->pc - s->cs_base);
5900 gen_eob(s);
5901 }
5902 }
5903 break;
5904 default:
5905 goto illegal_op;
5906 }
5907 break;
5908 case 0x108: /* invd */
5909 case 0x109: /* wbinvd */
5910 if (s->cpl != 0) {
5911 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5912 } else {
5913 /* nothing to do */
5914 }
5915 break;
5916 case 0x63: /* arpl or movslS (x86_64) */
5917#ifdef TARGET_X86_64
5918 if (CODE64(s)) {
5919 int d_ot;
5920 /* d_ot is the size of destination */
5921 d_ot = dflag + OT_WORD;
5922
5923 modrm = ldub_code(s->pc++);
5924 reg = ((modrm >> 3) & 7) | rex_r;
5925 mod = (modrm >> 6) & 3;
5926 rm = (modrm & 7) | REX_B(s);
5927
5928 if (mod == 3) {
5929 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5930 /* sign extend */
5931 if (d_ot == OT_QUAD)
5932 gen_op_movslq_T0_T0();
5933 gen_op_mov_reg_T0[d_ot][reg]();
5934 } else {
5935 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5936 if (d_ot == OT_QUAD) {
5937 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5938 } else {
5939 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5940 }
5941 gen_op_mov_reg_T0[d_ot][reg]();
5942 }
5943 } else
5944#endif
5945 {
5946 if (!s->pe || s->vm86)
5947 goto illegal_op;
5948 ot = dflag ? OT_LONG : OT_WORD;
5949 modrm = ldub_code(s->pc++);
5950 reg = (modrm >> 3) & 7;
5951 mod = (modrm >> 6) & 3;
5952 rm = modrm & 7;
5953#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
5954 gen_op_mov_TN_reg[ot][1][reg]();
5955#endif
5956 if (mod != 3) {
5957 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5958 gen_op_ld_T0_A0[ot + s->mem_index]();
5959 } else {
5960 gen_op_mov_TN_reg[ot][0][rm]();
5961 }
5962 if (s->cc_op != CC_OP_DYNAMIC)
5963 gen_op_set_cc_op(s->cc_op);
5964 gen_op_arpl();
5965 s->cc_op = CC_OP_EFLAGS;
5966 if (mod != 3) {
5967 gen_op_st_T0_A0[ot + s->mem_index]();
5968 } else {
5969 gen_op_mov_reg_T0[ot][rm]();
5970 }
5971 gen_op_arpl_update();
5972 }
5973 break;
5974 case 0x102: /* lar */
5975 case 0x103: /* lsl */
5976 if (!s->pe || s->vm86)
5977 goto illegal_op;
5978 ot = dflag ? OT_LONG : OT_WORD;
5979 modrm = ldub_code(s->pc++);
5980 reg = ((modrm >> 3) & 7) | rex_r;
5981 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5982 gen_op_mov_TN_reg[ot][1][reg]();
5983 if (s->cc_op != CC_OP_DYNAMIC)
5984 gen_op_set_cc_op(s->cc_op);
5985 if (b == 0x102)
5986 gen_op_lar();
5987 else
5988 gen_op_lsl();
5989 s->cc_op = CC_OP_EFLAGS;
5990 gen_op_mov_reg_T1[ot][reg]();
5991 break;
5992 case 0x118:
5993 modrm = ldub_code(s->pc++);
5994 mod = (modrm >> 6) & 3;
5995 op = (modrm >> 3) & 7;
5996 switch(op) {
5997 case 0: /* prefetchnta */
5998 case 1: /* prefetcht0 */
5999 case 2: /* prefetcht1 */
6000 case 3: /* prefetcht2 */
6001 if (mod == 3)
6002 goto illegal_op;
6003 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6004 /* nothing more to do */
6005 break;
6006 default: /* nop (multi byte) */
6007 gen_nop_modrm(s, modrm);
6008 break;
6009 }
6010 break;
6011 case 0x119 ... 0x11f: /* nop (multi byte) */
6012 modrm = ldub_code(s->pc++);
6013 gen_nop_modrm(s, modrm);
6014 break;
6015 case 0x120: /* mov reg, crN */
6016 case 0x122: /* mov crN, reg */
6017 if (s->cpl != 0) {
6018 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6019 } else {
6020 modrm = ldub_code(s->pc++);
6021 if ((modrm & 0xc0) != 0xc0)
6022 goto illegal_op;
6023 rm = (modrm & 7) | REX_B(s);
6024 reg = ((modrm >> 3) & 7) | rex_r;
6025 if (CODE64(s))
6026 ot = OT_QUAD;
6027 else
6028 ot = OT_LONG;
6029 switch(reg) {
6030 case 0:
6031 case 2:
6032 case 3:
6033 case 4:
6034 case 8:
6035 if (b & 2) {
6036 gen_op_mov_TN_reg[ot][0][rm]();
6037 gen_op_movl_crN_T0(reg);
6038 gen_jmp_im(s->pc - s->cs_base);
6039 gen_eob(s);
6040 } else {
6041#if !defined(CONFIG_USER_ONLY)
6042 if (reg == 8)
6043 gen_op_movtl_T0_cr8();
6044 else
6045#endif
6046 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6047 gen_op_mov_reg_T0[ot][rm]();
6048 }
6049 break;
6050 default:
6051 goto illegal_op;
6052 }
6053 }
6054 break;
6055 case 0x121: /* mov reg, drN */
6056 case 0x123: /* mov drN, reg */
6057 if (s->cpl != 0) {
6058 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6059 } else {
6060 modrm = ldub_code(s->pc++);
6061 if ((modrm & 0xc0) != 0xc0)
6062 goto illegal_op;
6063 rm = (modrm & 7) | REX_B(s);
6064 reg = ((modrm >> 3) & 7) | rex_r;
6065 if (CODE64(s))
6066 ot = OT_QUAD;
6067 else
6068 ot = OT_LONG;
6069 /* XXX: do it dynamically with CR4.DE bit */
6070 if (reg == 4 || reg == 5 || reg >= 8)
6071 goto illegal_op;
6072 if (b & 2) {
6073 gen_op_mov_TN_reg[ot][0][rm]();
6074 gen_op_movl_drN_T0(reg);
6075 gen_jmp_im(s->pc - s->cs_base);
6076 gen_eob(s);
6077 } else {
6078 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6079 gen_op_mov_reg_T0[ot][rm]();
6080 }
6081 }
6082 break;
6083 case 0x106: /* clts */
6084 if (s->cpl != 0) {
6085 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6086 } else {
6087 gen_op_clts();
6088 /* abort block because static cpu state changed */
6089 gen_jmp_im(s->pc - s->cs_base);
6090 gen_eob(s);
6091 }
6092 break;
6093 /* MMX/SSE/SSE2/PNI support */
6094 case 0x1c3: /* MOVNTI reg, mem */
6095 if (!(s->cpuid_features & CPUID_SSE2))
6096 goto illegal_op;
6097 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6098 modrm = ldub_code(s->pc++);
6099 mod = (modrm >> 6) & 3;
6100 if (mod == 3)
6101 goto illegal_op;
6102 reg = ((modrm >> 3) & 7) | rex_r;
6103 /* generate a generic store */
6104 gen_ldst_modrm(s, modrm, ot, reg, 1);
6105 break;
6106 case 0x1ae:
6107 modrm = ldub_code(s->pc++);
6108 mod = (modrm >> 6) & 3;
6109 op = (modrm >> 3) & 7;
6110 switch(op) {
6111 case 0: /* fxsave */
6112 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6113 (s->flags & HF_EM_MASK))
6114 goto illegal_op;
6115 if (s->flags & HF_TS_MASK) {
6116 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6117 break;
6118 }
6119 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6120 gen_op_fxsave_A0((s->dflag == 2));
6121 break;
6122 case 1: /* fxrstor */
6123 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6124 (s->flags & HF_EM_MASK))
6125 goto illegal_op;
6126 if (s->flags & HF_TS_MASK) {
6127 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6128 break;
6129 }
6130 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6131 gen_op_fxrstor_A0((s->dflag == 2));
6132 break;
6133 case 2: /* ldmxcsr */
6134 case 3: /* stmxcsr */
6135 if (s->flags & HF_TS_MASK) {
6136 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6137 break;
6138 }
6139 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6140 mod == 3)
6141 goto illegal_op;
6142 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6143 if (op == 2) {
6144 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6145 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6146 } else {
6147 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6148 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6149 }
6150 break;
6151 case 5: /* lfence */
6152 case 6: /* mfence */
6153 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6154 goto illegal_op;
6155 break;
6156 case 7: /* sfence / clflush */
6157 if ((modrm & 0xc7) == 0xc0) {
6158 /* sfence */
6159 if (!(s->cpuid_features & CPUID_SSE))
6160 goto illegal_op;
6161 } else {
6162 /* clflush */
6163 if (!(s->cpuid_features & CPUID_CLFLUSH))
6164 goto illegal_op;
6165 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6166 }
6167 break;
6168 default:
6169 goto illegal_op;
6170 }
6171 break;
6172 case 0x10d: /* prefetch */
6173 modrm = ldub_code(s->pc++);
6174 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6175 /* ignore for now */
6176 break;
6177 case 0x1aa: /* rsm */
6178 if (!(s->flags & HF_SMM_MASK))
6179 goto illegal_op;
6180 if (s->cc_op != CC_OP_DYNAMIC) {
6181 gen_op_set_cc_op(s->cc_op);
6182 s->cc_op = CC_OP_DYNAMIC;
6183 }
6184 gen_jmp_im(s->pc - s->cs_base);
6185 gen_op_rsm();
6186 gen_eob(s);
6187 break;
6188 case 0x110 ... 0x117:
6189 case 0x128 ... 0x12f:
6190 case 0x150 ... 0x177:
6191 case 0x17c ... 0x17f:
6192 case 0x1c2:
6193 case 0x1c4 ... 0x1c6:
6194 case 0x1d0 ... 0x1fe:
6195 gen_sse(s, b, pc_start, rex_r);
6196 break;
6197 default:
6198 goto illegal_op;
6199 }
6200 /* lock generation */
6201 if (s->prefix & PREFIX_LOCK)
6202 gen_op_unlock();
6203 return s->pc;
6204 illegal_op:
6205 if (s->prefix & PREFIX_LOCK)
6206 gen_op_unlock();
6207 /* XXX: ensure that no lock was generated */
6208 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6209 return s->pc;
6210}
6211
/* Convenience masks over the x86 EFLAGS condition-code bits used by the
   opc_read_flags/opc_write_flags tables below: CC_OSZAPC covers all six
   arithmetic status flags, CC_OSZAP the same set minus the carry flag. */
6212#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6213#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6214
/* flags read by an operation: for each micro-op index, the set of
   EFLAGS condition-code bits (CC_*) that the op consumes.  Ops not
   listed read no flags.  optimize_flags() uses this table (together
   with opc_write_flags) when walking the op stream backwards to
   compute flag liveness. */
static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops consume the auxiliary (and for DAA/DAS the
       carry) flag */
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps on the result of a subtraction: each variant
       reads exactly the flags its condition tests */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* LOOPZ/LOOPNZ test ZF in addition to (E)CX */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* SETcc from the lazily-computed cc state */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* SETcc specialised on a subtraction result */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    /* PUSHF/LAHF-style reads need the full set */
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* 64-bit counterparts of the entries above */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr consume the input carry; instantiated once per
   memory-access variant suffix (plain, _raw, _kernel, _user). */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6347
/* flags written by an operation: for each micro-op index, the set of
   EFLAGS condition-code bits (CC_*) that the op (re)defines.  Ops not
   listed write no flags.  An op whose entire written set is dead may
   be replaced via opc_simpler in optimize_flags(). */
static uint16_t opc_write_flags[NB_OPS] = {
    /* the generic cc-update helpers define all six arithmetic flags */
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* POPF/SAHF-style writes; the byte variant (SAHF) cannot touch OF */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit-test family only defines flags (CF materially; the table is
       conservative and marks the full set) */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    /* ops that only define ZF (segment checks) or the fcomi subset */
    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* flag-writing arithmetic/shift/rotate ops, instantiated once per
   memory-access variant suffix (plain, _raw, _kernel, _user). */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6498
/* simpler form of an operation if no flags need to be generated:
   maps a flag-writing micro-op to a cheaper equivalent (often
   INDEX_op_nop or the non-_cc variant) that optimize_flags() can
   substitute when none of the written flags is live.  Unlisted
   entries are filled with the identity mapping by
   optimize_flags_init(). */
static uint16_t opc_simpler[NB_OPS] = {
    /* the lazy cc-state savers can be dropped entirely */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts fall back to their flag-less twins */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotate simplifications, instantiated per memory-access variant
   suffix (plain, _raw, _kernel, _user). */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6542
6543void optimize_flags_init(void)
6544{
6545 int i;
6546 /* put default values in arrays */
6547 for(i = 0; i < NB_OPS; i++) {
6548 if (opc_simpler[i] == 0)
6549 opc_simpler[i] = i;
6550 }
6551}
6552
6553/* CPU flags computation optimization: we move backward thru the
6554 generated code to see which flags are needed. The operation is
6555 modified if suitable */
6556static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6557{
6558 uint16_t *opc_ptr;
6559 int live_flags, write_flags, op;
6560
6561 opc_ptr = opc_buf + opc_buf_len;
6562 /* live_flags contains the flags needed by the next instructions
6563 in the code. At the end of the bloc, we consider that all the
6564 flags are live. */
6565 live_flags = CC_OSZAPC;
6566 while (opc_ptr > opc_buf) {
6567 op = *--opc_ptr;
6568 /* if none of the flags written by the instruction is used,
6569 then we can try to find a simpler instruction */
6570 write_flags = opc_write_flags[op];
6571 if ((live_flags & write_flags) == 0) {
6572 *opc_ptr = opc_simpler[op];
6573 }
6574 /* compute the live flags before the instruction */
6575 live_flags &= ~write_flags;
6576 live_flags |= opc_read_flags[op];
6577 }
6578}
6579
/* Generate intermediate (micro-op) code in gen_opc_buf and
   gen_opparam_buf for basic block 'tb'.  If search_pc is TRUE, also
   record per-micro-op PC information (gen_opc_pc, gen_opc_cc_op,
   gen_opc_instr_start) so that a host PC inside the translated block
   can later be mapped back to a guest PC.  Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the CPU mode bits captured in tb->flags into the
       disassembler context. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;          /* protected mode */
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;    /* 32-bit code segment */
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;      /* 32-bit stack segment */
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;  /* non-zero seg bases */
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;           /* virtual-8086 mode */
#ifdef VBOX_WITH_CALL_RECORD
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    /* NOTE(review): call recording is enabled only for paged, 32-bit,
       non-raw-ring-0 code with IF clear — presumably the VBox
       raw-mode patching case; confirm against the call-record
       consumers. */
    if (    !(env->state & CPU_RAW_RING0)
        &&  (env->cr[0] & CR0_PG_MASK)
        &&  !(env->eflags & X86_EFL_IF)
        &&  dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;             /* trap flag (single step) */
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;                    /* flags state unknown on entry */
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: index into the per-privilege
       accessor variants (0 = raw; assumes a stride of 4 ops per
       variant — see the DEF_* tables above; TODO confirm) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;        /* long mode active */
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;    /* 64-bit code segment */
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no per-instruction
       event (trap flag, debugger single step, pending interrupt
       inhibit) must be honoured */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;  /* index of the last micro-op annotated with PC info */

    for(;;) {
        /* emit a debug exception for any breakpoint at this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* annotate the first micro-op of this guest instruction
               with its PC and cc state; zero-fill the gap since the
               previous instruction */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VBox may request emulation of exactly one instruction:
           consume the request and close the block here */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too (keep the
           block within one guest page, with slack for a long final
           instruction) */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* when searching a PC the TB size is already known; do not clobber it */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6771
6772int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6773{
6774 return gen_intermediate_code_internal(env, tb, 0);
6775}
6776
6777int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6778{
6779 return gen_intermediate_code_internal(env, tb, 1);
6780}
6781
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette