/*---------------------------------------------------------------*/
/*--- begin                                 host_amd64_defs.h ---*/
/*---------------------------------------------------------------*/
/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2004-2010 OpenWorks LLP

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.

   Neither the names of the U.S. Department of Energy nor the
   University of California nor the names of its contributors may be
   used to endorse or promote products derived from this software
   without prior written permission.
*/
36 #ifndef __VEX_HOST_AMD64_DEFS_H
37 #define __VEX_HOST_AMD64_DEFS_H
/* --------- Registers. --------- */

/* The usual HReg abstraction.  There are 16 real int regs, 6 real
   float regs, and 16 real vector regs. */
46 extern void ppHRegAMD64 ( HReg );
48 extern HReg hregAMD64_RAX ( void );
49 extern HReg hregAMD64_RBX ( void );
50 extern HReg hregAMD64_RCX ( void );
51 extern HReg hregAMD64_RDX ( void );
52 extern HReg hregAMD64_RSP ( void );
53 extern HReg hregAMD64_RBP ( void );
54 extern HReg hregAMD64_RSI ( void );
55 extern HReg hregAMD64_RDI ( void );
56 extern HReg hregAMD64_R8 ( void );
57 extern HReg hregAMD64_R9 ( void );
58 extern HReg hregAMD64_R10 ( void );
59 extern HReg hregAMD64_R11 ( void );
60 extern HReg hregAMD64_R12 ( void );
61 extern HReg hregAMD64_R13 ( void );
62 extern HReg hregAMD64_R14 ( void );
63 extern HReg hregAMD64_R15 ( void );
65 extern HReg hregAMD64_FAKE0 ( void );
66 extern HReg hregAMD64_FAKE1 ( void );
67 extern HReg hregAMD64_FAKE2 ( void );
68 extern HReg hregAMD64_FAKE3 ( void );
69 extern HReg hregAMD64_FAKE4 ( void );
70 extern HReg hregAMD64_FAKE5 ( void );
72 extern HReg hregAMD64_XMM0 ( void );
73 extern HReg hregAMD64_XMM1 ( void );
74 extern HReg hregAMD64_XMM2 ( void );
75 extern HReg hregAMD64_XMM3 ( void );
76 extern HReg hregAMD64_XMM4 ( void );
77 extern HReg hregAMD64_XMM5 ( void );
78 extern HReg hregAMD64_XMM6 ( void );
79 extern HReg hregAMD64_XMM7 ( void );
80 extern HReg hregAMD64_XMM8 ( void );
81 extern HReg hregAMD64_XMM9 ( void );
82 extern HReg hregAMD64_XMM10 ( void );
83 extern HReg hregAMD64_XMM11 ( void );
84 extern HReg hregAMD64_XMM12 ( void );
85 extern HReg hregAMD64_XMM13 ( void );
86 extern HReg hregAMD64_XMM14 ( void );
87 extern HReg hregAMD64_XMM15 ( void );
90 /* --------- Condition codes, AMD encoding. --------- */
/* Condition codes, using the AMD encoding scheme (these values match
   the low 4 bits of the Jcc/SETcc opcodes).  Acc_ALWAYS is not a real
   hardware encoding ("the usual hack"). */
typedef
   enum {
      Acc_O      = 0,  /* overflow */
      Acc_NO     = 1,  /* no overflow */

      Acc_B      = 2,  /* below */
      Acc_NB     = 3,  /* not below */

      Acc_Z      = 4,  /* zero */
      Acc_NZ     = 5,  /* not zero */

      Acc_BE     = 6,  /* below or equal */
      Acc_NBE    = 7,  /* not below or equal */

      Acc_S      = 8,  /* negative */
      Acc_NS     = 9,  /* not negative */

      Acc_P      = 10, /* parity even */
      Acc_NP     = 11, /* not parity even */

      Acc_L      = 12, /* jump less */
      Acc_NL     = 13, /* not less */

      Acc_LE     = 14, /* less or equal */
      Acc_NLE    = 15, /* not less or equal */

      Acc_ALWAYS = 16  /* the usual hack */
   }
   AMD64CondCode;
122 extern HChar* showAMD64CondCode ( AMD64CondCode );
125 /* --------- Memory address expressions (amodes). --------- */
129 Aam_IR, /* Immediate + Reg */
130 Aam_IRRS /* Immediate + Reg1 + (Reg2 << Shift) */
146 Int shift; /* 0, 1, 2 or 3 only */
152 extern AMD64AMode* AMD64AMode_IR ( UInt, HReg );
153 extern AMD64AMode* AMD64AMode_IRRS ( UInt, HReg, HReg, Int );
155 extern AMD64AMode* dopyAMD64AMode ( AMD64AMode* );
157 extern void ppAMD64AMode ( AMD64AMode* );
160 /* --------- Operand, which can be reg, immediate or memory. --------- */
188 extern AMD64RMI* AMD64RMI_Imm ( UInt );
189 extern AMD64RMI* AMD64RMI_Reg ( HReg );
190 extern AMD64RMI* AMD64RMI_Mem ( AMD64AMode* );
192 extern void ppAMD64RMI ( AMD64RMI* );
195 /* --------- Operand, which can be reg or immediate only. --------- */
219 extern AMD64RI* AMD64RI_Imm ( UInt );
220 extern AMD64RI* AMD64RI_Reg ( HReg );
222 extern void ppAMD64RI ( AMD64RI* );
225 /* --------- Operand, which can be reg or memory only. --------- */
249 extern AMD64RM* AMD64RM_Reg ( HReg );
250 extern AMD64RM* AMD64RM_Mem ( AMD64AMode* );
252 extern void ppAMD64RM ( AMD64RM* );
255 /* --------- Instructions. --------- */
265 extern HChar* showAMD64UnaryOp ( AMD64UnaryOp );
274 Aalu_ADD, Aalu_SUB, Aalu_ADC, Aalu_SBB,
275 Aalu_AND, Aalu_OR, Aalu_XOR,
280 extern HChar* showAMD64AluOp ( AMD64AluOp );
287 Ash_SHL, Ash_SHR, Ash_SAR
291 extern HChar* showAMD64ShiftOp ( AMD64ShiftOp );
299 Afp_SCALE, Afp_ATAN, Afp_YL2X, Afp_YL2XP1, Afp_PREM, Afp_PREM1,
302 Afp_SIN, Afp_COS, Afp_TAN,
307 extern HChar* showA87FpOp ( A87FpOp );
316 /* Floating point binary */
317 Asse_ADDF, Asse_SUBF, Asse_MULF, Asse_DIVF,
318 Asse_MAXF, Asse_MINF,
319 Asse_CMPEQF, Asse_CMPLTF, Asse_CMPLEF, Asse_CMPUNF,
320 /* Floating point unary */
321 Asse_RCPF, Asse_RSQRTF, Asse_SQRTF,
323 Asse_AND, Asse_OR, Asse_XOR, Asse_ANDN,
324 Asse_ADD8, Asse_ADD16, Asse_ADD32, Asse_ADD64,
325 Asse_QADD8U, Asse_QADD16U,
326 Asse_QADD8S, Asse_QADD16S,
327 Asse_SUB8, Asse_SUB16, Asse_SUB32, Asse_SUB64,
328 Asse_QSUB8U, Asse_QSUB16U,
329 Asse_QSUB8S, Asse_QSUB16S,
333 Asse_AVG8U, Asse_AVG16U,
338 Asse_CMPEQ8, Asse_CMPEQ16, Asse_CMPEQ32,
339 Asse_CMPGT8S, Asse_CMPGT16S, Asse_CMPGT32S,
340 Asse_SHL16, Asse_SHL32, Asse_SHL64,
341 Asse_SHR16, Asse_SHR32, Asse_SHR64,
342 Asse_SAR16, Asse_SAR32,
343 Asse_PACKSSD, Asse_PACKSSW, Asse_PACKUSW,
344 Asse_UNPCKHB, Asse_UNPCKHW, Asse_UNPCKHD, Asse_UNPCKHQ,
345 Asse_UNPCKLB, Asse_UNPCKLW, Asse_UNPCKLD, Asse_UNPCKLQ
349 extern HChar* showAMD64SseOp ( AMD64SseOp );
355 Ain_Imm64, /* Generate 64-bit literal to register */
356 Ain_Alu64R, /* 64-bit mov/arith/logical, dst=REG */
357 Ain_Alu64M, /* 64-bit mov/arith/logical, dst=MEM */
358 Ain_Sh64, /* 64-bit shift/rotate, dst=REG or MEM */
359 Ain_Test64, /* 64-bit test (AND, set flags, discard result) */
360 Ain_Unary64, /* 64-bit not and neg */
361 Ain_Lea64, /* 64-bit compute EA into a reg */
362 Ain_MulL, /* widening multiply */
363 Ain_Div, /* div and mod */
364 //.. Xin_Sh3232, /* shldl or shrdl */
365 Ain_Push, /* push 64-bit value on stack */
366 Ain_Call, /* call to address in register */
367 Ain_Goto, /* conditional/unconditional jmp to dst */
368 Ain_CMov64, /* conditional move */
369 Ain_MovZLQ, /* reg-reg move, zeroing out top half */
370 Ain_LoadEX, /* mov{s,z}{b,w,l}q from mem to reg */
371 Ain_Store, /* store 32/16/8 bit value in memory */
372 Ain_Set64, /* convert condition code to 64-bit value */
373 Ain_Bsfr64, /* 64-bit bsf/bsr */
374 Ain_MFence, /* mem fence */
375 Ain_ACAS, /* 8/16/32/64-bit lock;cmpxchg */
376 Ain_DACAS, /* lock;cmpxchg8b/16b (doubleword ACAS, 2 x
377 32-bit or 2 x 64-bit only) */
379 Ain_A87Free, /* free up x87 registers */
380 Ain_A87PushPop, /* x87 loads/stores */
381 Ain_A87FpOp, /* x87 operations */
382 Ain_A87LdCW, /* load x87 control word */
383 Ain_A87StSW, /* store x87 status word */
385 //.. Xin_FpUnary, /* FP fake unary op */
386 //.. Xin_FpBinary, /* FP fake binary op */
387 //.. Xin_FpLdSt, /* FP fake load/store */
388 //.. Xin_FpLdStI, /* FP fake load/store, converting to/from Int */
389 //.. Xin_Fp64to32, /* FP round IEEE754 double to IEEE754 single */
390 //.. Xin_FpCMov, /* FP fake floating point conditional move */
391 Ain_LdMXCSR, /* load %mxcsr */
392 //.. Xin_FpStSW_AX, /* fstsw %ax */
393 Ain_SseUComIS, /* ucomisd/ucomiss, then get %rflags into int
395 Ain_SseSI2SF, /* scalar 32/64 int to 32/64 float conversion */
396 Ain_SseSF2SI, /* scalar 32/64 float to 32/64 int conversion */
397 Ain_SseSDSS, /* scalar float32 to/from float64 */
399 //.. Xin_SseConst, /* Generate restricted SSE literal */
400 Ain_SseLdSt, /* SSE load/store 32/64/128 bits, no alignment
401 constraints, upper 96/64/0 bits arbitrary */
402 Ain_SseLdzLO, /* SSE load low 32/64 bits, zero remainder of reg */
403 Ain_Sse32Fx4, /* SSE binary, 32Fx4 */
404 Ain_Sse32FLo, /* SSE binary, 32F in lowest lane only */
405 Ain_Sse64Fx2, /* SSE binary, 64Fx2 */
406 Ain_Sse64FLo, /* SSE binary, 64F in lowest lane only */
407 Ain_SseReRg, /* SSE binary general reg-reg, Re, Rg */
408 Ain_SseCMov, /* SSE conditional move */
409 Ain_SseShuf /* SSE2 shuffle (pshufd) */
413 /* Destinations are on the RIGHT (second operand) */
435 UInt src; /* shift amount, or 0 means %cl */
447 /* 64-bit compute EA into a reg */
452 /* 64 x 64 -> 128 bit widening multiply: RDX:RAX = RAX *s/u
458 /* amd64 div/idiv instruction. Modifies RDX and RAX and
462 Int sz; /* 4 or 8 only */
465 //.. /* shld/shrd. op may only be Xsh_SHL or Xsh_SHR */
468 //.. UInt amt; /* shift amount, or 0 means %cl */
475 /* Pseudo-insn. Call target (an absolute address), on given
476 condition (which could be Xcc_ALWAYS). */
480 Int regparms; /* 0 .. 6 */
482 /* Pseudo-insn. Goto dst, on given condition (which could be
489 /* Mov src to dst on the given condition, which may not
490 be the bogus Acc_ALWAYS. */
496 /* reg-reg move, zeroing out top half */
501 /* Sign/Zero extending loads. Dst size is always 64 bits. */
503 UChar szSmall; /* only 1, 2 or 4 */
508 /* 32/16/8 bit stores. */
510 UChar sz; /* only 1, 2 or 4 */
514 /* Convert an amd64 condition code to a 64-bit value (0 or 1). */
519 /* 64-bit bsf or bsr. */
525 /* Mem fence. In short, an insn which flushes all preceding
526 loads and stores as much as possible before continuing.
527 On AMD64 we emit a real "mfence". */
532 UChar sz; /* 1, 2, 4 or 8 */
536 UChar sz; /* 4 or 8 only */
541 /* A very minimal set of x87 insns, that operate exactly in a
542 stack-like way so no need to think about x87 registers. */
544 /* Do 'ffree' on %st(7) .. %st(7-nregs) */
546 Int nregs; /* 1 <= nregs <= 7 */
549 /* Push a 64-bit FP value from memory onto the stack, or move
550 a value from the stack to memory and remove it from the
557 /* Do an operation on the top-of-stack. This can be unary, in
558 which case it is %st0 = OP( %st0 ), or binary: %st0 = OP(
564 /* Load the FPU control word. */
569 /* Store the FPU status word (fstsw m16) */
576 /* Load 32 bits into %mxcsr. */
586 /* ucomisd/ucomiss, then get %rflags into int register */
588 UChar sz; /* 4 or 8 only */
593 /* scalar 32/64 int to 32/64 float conversion */
595 UChar szS; /* 4 or 8 */
596 UChar szD; /* 4 or 8 */
597 HReg src; /* i class */
598 HReg dst; /* v class */
600 /* scalar 32/64 float to 32/64 int conversion */
602 UChar szS; /* 4 or 8 */
603 UChar szD; /* 4 or 8 */
604 HReg src; /* v class */
605 HReg dst; /* i class */
607 /* scalar float32 to/from float64 */
609 Bool from64; /* True: 64->32; False: 32->64 */
614 //.. /* Simplistic SSE[123] */
621 UChar sz; /* 4, 8 or 16 only */
626 Int sz; /* 4 or 8 only */
655 /* Mov src to dst on the given condition, which may not
656 be the bogus Xcc_ALWAYS. */
663 Int order; /* 0 <= order <= 0xFF */
672 extern AMD64Instr* AMD64Instr_Imm64 ( ULong imm64, HReg dst );
673 extern AMD64Instr* AMD64Instr_Alu64R ( AMD64AluOp, AMD64RMI*, HReg );
674 extern AMD64Instr* AMD64Instr_Alu64M ( AMD64AluOp, AMD64RI*, AMD64AMode* );
675 extern AMD64Instr* AMD64Instr_Unary64 ( AMD64UnaryOp op, HReg dst );
676 extern AMD64Instr* AMD64Instr_Lea64 ( AMD64AMode* am, HReg dst );
677 extern AMD64Instr* AMD64Instr_Sh64 ( AMD64ShiftOp, UInt, HReg );
678 extern AMD64Instr* AMD64Instr_Test64 ( UInt imm32, HReg dst );
679 extern AMD64Instr* AMD64Instr_MulL ( Bool syned, AMD64RM* );
680 extern AMD64Instr* AMD64Instr_Div ( Bool syned, Int sz, AMD64RM* );
681 //.. extern AMD64Instr* AMD64Instr_Sh3232 ( AMD64ShiftOp, UInt amt, HReg src, HReg dst );
682 extern AMD64Instr* AMD64Instr_Push ( AMD64RMI* );
683 extern AMD64Instr* AMD64Instr_Call ( AMD64CondCode, Addr64, Int );
684 extern AMD64Instr* AMD64Instr_Goto ( IRJumpKind, AMD64CondCode cond, AMD64RI* dst );
685 extern AMD64Instr* AMD64Instr_CMov64 ( AMD64CondCode, AMD64RM* src, HReg dst );
686 extern AMD64Instr* AMD64Instr_MovZLQ ( HReg src, HReg dst );
687 extern AMD64Instr* AMD64Instr_LoadEX ( UChar szSmall, Bool syned,
688 AMD64AMode* src, HReg dst );
689 extern AMD64Instr* AMD64Instr_Store ( UChar sz, HReg src, AMD64AMode* dst );
690 extern AMD64Instr* AMD64Instr_Set64 ( AMD64CondCode cond, HReg dst );
691 extern AMD64Instr* AMD64Instr_Bsfr64 ( Bool isFwds, HReg src, HReg dst );
692 extern AMD64Instr* AMD64Instr_MFence ( void );
693 extern AMD64Instr* AMD64Instr_ACAS ( AMD64AMode* addr, UChar sz );
694 extern AMD64Instr* AMD64Instr_DACAS ( AMD64AMode* addr, UChar sz );
696 extern AMD64Instr* AMD64Instr_A87Free ( Int nregs );
697 extern AMD64Instr* AMD64Instr_A87PushPop ( AMD64AMode* addr, Bool isPush );
698 extern AMD64Instr* AMD64Instr_A87FpOp ( A87FpOp op );
699 extern AMD64Instr* AMD64Instr_A87LdCW ( AMD64AMode* addr );
700 extern AMD64Instr* AMD64Instr_A87StSW ( AMD64AMode* addr );
702 //.. extern AMD64Instr* AMD64Instr_FpUnary ( AMD64FpOp op, HReg src, HReg dst );
703 //.. extern AMD64Instr* AMD64Instr_FpBinary ( AMD64FpOp op, HReg srcL, HReg srcR, HReg dst );
704 //.. extern AMD64Instr* AMD64Instr_FpLdSt ( Bool isLoad, UChar sz, HReg reg, AMD64AMode* );
705 //.. extern AMD64Instr* AMD64Instr_FpLdStI ( Bool isLoad, UChar sz, HReg reg, AMD64AMode* );
706 //.. extern AMD64Instr* AMD64Instr_Fp64to32 ( HReg src, HReg dst );
707 //.. extern AMD64Instr* AMD64Instr_FpCMov ( AMD64CondCode, HReg src, HReg dst );
708 extern AMD64Instr* AMD64Instr_LdMXCSR ( AMD64AMode* );
709 //.. extern AMD64Instr* AMD64Instr_FpStSW_AX ( void );
710 extern AMD64Instr* AMD64Instr_SseUComIS ( Int sz, HReg srcL, HReg srcR, HReg dst );
711 extern AMD64Instr* AMD64Instr_SseSI2SF ( Int szS, Int szD, HReg src, HReg dst );
712 extern AMD64Instr* AMD64Instr_SseSF2SI ( Int szS, Int szD, HReg src, HReg dst );
713 extern AMD64Instr* AMD64Instr_SseSDSS ( Bool from64, HReg src, HReg dst );
715 //.. extern AMD64Instr* AMD64Instr_SseConst ( UShort con, HReg dst );
716 extern AMD64Instr* AMD64Instr_SseLdSt ( Bool isLoad, Int sz, HReg, AMD64AMode* );
717 extern AMD64Instr* AMD64Instr_SseLdzLO ( Int sz, HReg, AMD64AMode* );
718 extern AMD64Instr* AMD64Instr_Sse32Fx4 ( AMD64SseOp, HReg, HReg );
719 extern AMD64Instr* AMD64Instr_Sse32FLo ( AMD64SseOp, HReg, HReg );
720 extern AMD64Instr* AMD64Instr_Sse64Fx2 ( AMD64SseOp, HReg, HReg );
721 extern AMD64Instr* AMD64Instr_Sse64FLo ( AMD64SseOp, HReg, HReg );
722 extern AMD64Instr* AMD64Instr_SseReRg ( AMD64SseOp, HReg, HReg );
723 extern AMD64Instr* AMD64Instr_SseCMov ( AMD64CondCode, HReg src, HReg dst );
724 extern AMD64Instr* AMD64Instr_SseShuf ( Int order, HReg src, HReg dst );
727 extern void ppAMD64Instr ( AMD64Instr*, Bool );
729 /* Some functions that insulate the register allocator from details
730 of the underlying instruction set. */
731 extern void getRegUsage_AMD64Instr ( HRegUsage*, AMD64Instr*, Bool );
732 extern void mapRegs_AMD64Instr ( HRegRemap*, AMD64Instr*, Bool );
733 extern Bool isMove_AMD64Instr ( AMD64Instr*, HReg*, HReg* );
734 extern Int emit_AMD64Instr ( UChar* buf, Int nbuf, AMD64Instr*,
735 Bool, void* dispatch );
737 extern void genSpill_AMD64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
738 HReg rreg, Int offset, Bool );
739 extern void genReload_AMD64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
740 HReg rreg, Int offset, Bool );
742 extern void getAllocableRegs_AMD64 ( Int*, HReg** );
743 extern HInstrArray* iselSB_AMD64 ( IRSB*, VexArch,
747 #endif /* ndef __VEX_HOST_AMD64_DEFS_H */
749 /*---------------------------------------------------------------*/
750 /*--- end host_amd64_defs.h ---*/
751 /*---------------------------------------------------------------*/