
/*---------------------------------------------------------------*/
/*--- begin                                 host_amd64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2004-2011 OpenWorks LLP
      info@open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.

   Neither the names of the U.S. Department of Energy nor the
   University of California nor the names of its contributors may be
   used to endorse or promote products derived from this software
   without prior written permission.
*/

#ifndef __VEX_HOST_AMD64_DEFS_H
#define __VEX_HOST_AMD64_DEFS_H


/* --------- Registers. --------- */

/* The usual HReg abstraction.  There are 16 real int regs, 6 real
   float regs, and 16 real vector regs.
*/

extern void ppHRegAMD64 ( HReg );

extern HReg hregAMD64_RAX ( void );
extern HReg hregAMD64_RBX ( void );
extern HReg hregAMD64_RCX ( void );
extern HReg hregAMD64_RDX ( void );
extern HReg hregAMD64_RSP ( void );
extern HReg hregAMD64_RBP ( void );
extern HReg hregAMD64_RSI ( void );
extern HReg hregAMD64_RDI ( void );
extern HReg hregAMD64_R8  ( void );
extern HReg hregAMD64_R9  ( void );
extern HReg hregAMD64_R10 ( void );
extern HReg hregAMD64_R11 ( void );
extern HReg hregAMD64_R12 ( void );
extern HReg hregAMD64_R13 ( void );
extern HReg hregAMD64_R14 ( void );
extern HReg hregAMD64_R15 ( void );

extern HReg hregAMD64_FAKE0 ( void );
extern HReg hregAMD64_FAKE1 ( void );
extern HReg hregAMD64_FAKE2 ( void );
extern HReg hregAMD64_FAKE3 ( void );
extern HReg hregAMD64_FAKE4 ( void );
extern HReg hregAMD64_FAKE5 ( void );

extern HReg hregAMD64_XMM0  ( void );
extern HReg hregAMD64_XMM1  ( void );
extern HReg hregAMD64_XMM2  ( void );
extern HReg hregAMD64_XMM3  ( void );
extern HReg hregAMD64_XMM4  ( void );
extern HReg hregAMD64_XMM5  ( void );
extern HReg hregAMD64_XMM6  ( void );
extern HReg hregAMD64_XMM7  ( void );
extern HReg hregAMD64_XMM8  ( void );
extern HReg hregAMD64_XMM9  ( void );
extern HReg hregAMD64_XMM10 ( void );
extern HReg hregAMD64_XMM11 ( void );
extern HReg hregAMD64_XMM12 ( void );
extern HReg hregAMD64_XMM13 ( void );
extern HReg hregAMD64_XMM14 ( void );
extern HReg hregAMD64_XMM15 ( void );


/* --------- Condition codes, AMD encoding. --------- */

typedef
   enum {
      Acc_O      = 0,  /* overflow           */
      Acc_NO     = 1,  /* no overflow        */

      Acc_B      = 2,  /* below              */
      Acc_NB     = 3,  /* not below          */

      Acc_Z      = 4,  /* zero               */
      Acc_NZ     = 5,  /* not zero           */

      Acc_BE     = 6,  /* below or equal     */
      Acc_NBE    = 7,  /* not below or equal */

      Acc_S      = 8,  /* negative           */
      Acc_NS     = 9,  /* not negative       */

      Acc_P      = 10, /* parity even        */
      Acc_NP     = 11, /* not parity even    */

      Acc_L      = 12, /* less               */
      Acc_NL     = 13, /* not less           */

      Acc_LE     = 14, /* less or equal      */
      Acc_NLE    = 15, /* not less or equal  */

      Acc_ALWAYS = 16  /* the usual hack     */
   }
   AMD64CondCode;

extern HChar* showAMD64CondCode ( AMD64CondCode );


/* --------- Memory address expressions (amodes). --------- */

typedef
   enum {
     Aam_IR,        /* Immediate + Reg */
     Aam_IRRS       /* Immediate + Reg1 + (Reg2 << Shift) */
   }
   AMD64AModeTag;

typedef
   struct {
      AMD64AModeTag tag;
      union {
         struct {
            UInt imm;
            HReg reg;
         } IR;
         struct {
            UInt imm;
            HReg base;
            HReg index;
            Int  shift; /* 0, 1, 2 or 3 only */
         } IRRS;
      } Aam;
   }
   AMD64AMode;

extern AMD64AMode* AMD64AMode_IR   ( UInt, HReg );
extern AMD64AMode* AMD64AMode_IRRS ( UInt, HReg, HReg, Int );

extern AMD64AMode* dopyAMD64AMode ( AMD64AMode* );

extern void ppAMD64AMode ( AMD64AMode* );
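
/* Example (a sketch only, assuming the usual VEX isel environment in
   which the HReg helpers above are available): building the two amode
   forms for 16(%rbp) and 0(%rax,%rcx,8).  The shift field of 3
   denotes a scale of 1<<3 = 8.

      AMD64AMode* am1 = AMD64AMode_IR(16, hregAMD64_RBP());
      AMD64AMode* am2 = AMD64AMode_IRRS(0, hregAMD64_RAX(),
                                           hregAMD64_RCX(), 3);
*/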


/* --------- Operand, which can be reg, immediate or memory. --------- */

typedef
   enum {
      Armi_Imm,
      Armi_Reg,
      Armi_Mem
   }
   AMD64RMITag;

typedef
   struct {
      AMD64RMITag tag;
      union {
         struct {
            UInt imm32;
         } Imm;
         struct {
            HReg reg;
         } Reg;
         struct {
            AMD64AMode* am;
         } Mem;
      }
      Armi;
   }
   AMD64RMI;

extern AMD64RMI* AMD64RMI_Imm ( UInt );
extern AMD64RMI* AMD64RMI_Reg ( HReg );
extern AMD64RMI* AMD64RMI_Mem ( AMD64AMode* );

extern void ppAMD64RMI      ( AMD64RMI* );
extern void ppAMD64RMI_lo32 ( AMD64RMI* );
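
/* Example (again a sketch, assuming the same environment): the three
   forms an AMD64RMI can take, as instruction selection would build
   them for the operands $0x1234, %r10 and 8(%rsp) respectively.

      AMD64RMI* rmi_i = AMD64RMI_Imm(0x1234);
      AMD64RMI* rmi_r = AMD64RMI_Reg(hregAMD64_R10());
      AMD64RMI* rmi_m = AMD64RMI_Mem(AMD64AMode_IR(8, hregAMD64_RSP()));
*/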


/* --------- Operand, which can be reg or immediate only. --------- */

typedef
   enum {
      Ari_Imm,
      Ari_Reg
   }
   AMD64RITag;

typedef
   struct {
      AMD64RITag tag;
      union {
         struct {
            UInt imm32;
         } Imm;
         struct {
            HReg reg;
         } Reg;
      }
      Ari;
   }
   AMD64RI;

extern AMD64RI* AMD64RI_Imm ( UInt );
extern AMD64RI* AMD64RI_Reg ( HReg );

extern void ppAMD64RI ( AMD64RI* );


/* --------- Operand, which can be reg or memory only. --------- */

typedef
   enum {
      Arm_Reg,
      Arm_Mem
   }
   AMD64RMTag;

typedef
   struct {
      AMD64RMTag tag;
      union {
         struct {
            HReg reg;
         } Reg;
         struct {
            AMD64AMode* am;
         } Mem;
      }
      Arm;
   }
   AMD64RM;

extern AMD64RM* AMD64RM_Reg ( HReg );
extern AMD64RM* AMD64RM_Mem ( AMD64AMode* );

extern void ppAMD64RM ( AMD64RM* );


/* --------- Instructions. --------- */

/* --------- */
typedef
   enum {
      Aun_NEG,
      Aun_NOT
   }
   AMD64UnaryOp;

extern HChar* showAMD64UnaryOp ( AMD64UnaryOp );


/* --------- */
typedef
   enum {
      Aalu_INVALID,
      Aalu_MOV,
      Aalu_CMP,
      Aalu_ADD, Aalu_SUB, Aalu_ADC, Aalu_SBB,
      Aalu_AND, Aalu_OR, Aalu_XOR,
      Aalu_MUL
   }
   AMD64AluOp;

extern HChar* showAMD64AluOp ( AMD64AluOp );


/* --------- */
typedef
   enum {
      Ash_INVALID,
      Ash_SHL, Ash_SHR, Ash_SAR
   }
   AMD64ShiftOp;

extern HChar* showAMD64ShiftOp ( AMD64ShiftOp );


/* --------- */
typedef
   enum {
      Afp_INVALID,
      /* Binary */
      Afp_SCALE, Afp_ATAN, Afp_YL2X, Afp_YL2XP1, Afp_PREM, Afp_PREM1,
      /* Unary */
      Afp_SQRT,
      Afp_SIN, Afp_COS, Afp_TAN,
      Afp_ROUND, Afp_2XM1
   }
   A87FpOp;

extern HChar* showA87FpOp ( A87FpOp );


/* --------- */
typedef
   enum {
      Asse_INVALID,
      /* mov */
      Asse_MOV,
      /* Floating point binary */
      Asse_ADDF, Asse_SUBF, Asse_MULF, Asse_DIVF,
      Asse_MAXF, Asse_MINF,
      Asse_CMPEQF, Asse_CMPLTF, Asse_CMPLEF, Asse_CMPUNF,
      /* Floating point unary */
      Asse_RCPF, Asse_RSQRTF, Asse_SQRTF,
      /* Bitwise */
      Asse_AND, Asse_OR, Asse_XOR, Asse_ANDN,
      Asse_ADD8, Asse_ADD16, Asse_ADD32, Asse_ADD64,
      Asse_QADD8U, Asse_QADD16U,
      Asse_QADD8S, Asse_QADD16S,
      Asse_SUB8, Asse_SUB16, Asse_SUB32, Asse_SUB64,
      Asse_QSUB8U, Asse_QSUB16U,
      Asse_QSUB8S, Asse_QSUB16S,
      Asse_MUL16,
      Asse_MULHI16U,
      Asse_MULHI16S,
      Asse_AVG8U, Asse_AVG16U,
      Asse_MAX16S,
      Asse_MAX8U,
      Asse_MIN16S,
      Asse_MIN8U,
      Asse_CMPEQ8, Asse_CMPEQ16, Asse_CMPEQ32,
      Asse_CMPGT8S, Asse_CMPGT16S, Asse_CMPGT32S,
      Asse_SHL16, Asse_SHL32, Asse_SHL64,
      Asse_SHR16, Asse_SHR32, Asse_SHR64,
      Asse_SAR16, Asse_SAR32,
      Asse_PACKSSD, Asse_PACKSSW, Asse_PACKUSW,
      Asse_UNPCKHB, Asse_UNPCKHW, Asse_UNPCKHD, Asse_UNPCKHQ,
      Asse_UNPCKLB, Asse_UNPCKLW, Asse_UNPCKLD, Asse_UNPCKLQ
   }
   AMD64SseOp;

extern HChar* showAMD64SseOp ( AMD64SseOp );


/* --------- */
typedef
   enum {
      Ain_Imm64,       /* Generate 64-bit literal to register */
      Ain_Alu64R,      /* 64-bit mov/arith/logical, dst=REG */
      Ain_Alu64M,      /* 64-bit mov/arith/logical, dst=MEM */
      Ain_Sh64,        /* 64-bit shift/rotate, dst=REG or MEM */
      Ain_Test64,      /* 64-bit test (AND, set flags, discard result) */
      Ain_Unary64,     /* 64-bit not and neg */
      Ain_Lea64,       /* 64-bit compute EA into a reg */
      Ain_Alu32R,      /* 32-bit add/sub/and/or/xor/cmp, dst=REG (a la Alu64R) */
      Ain_MulL,        /* widening multiply */
      Ain_Div,         /* div and mod */
//..       Xin_Sh3232,    /* shldl or shrdl */
      Ain_Push,        /* push 64-bit value on stack */
      Ain_Call,        /* call to address in register */
      Ain_Goto,        /* conditional/unconditional jmp to dst */
      Ain_CMov64,      /* conditional move */
      Ain_MovxLQ,      /* reg-reg move, zx-ing/sx-ing top half */
      Ain_LoadEX,      /* mov{s,z}{b,w,l}q from mem to reg */
      Ain_Store,       /* store 32/16/8 bit value in memory */
      Ain_Set64,       /* convert condition code to 64-bit value */
      Ain_Bsfr64,      /* 64-bit bsf/bsr */
      Ain_MFence,      /* mem fence */
      Ain_ACAS,        /* 8/16/32/64-bit lock;cmpxchg */
      Ain_DACAS,       /* lock;cmpxchg8b/16b (doubleword ACAS, 2 x
                          32-bit or 2 x 64-bit only) */

      Ain_A87Free,     /* free up x87 registers */
      Ain_A87PushPop,  /* x87 loads/stores */
      Ain_A87FpOp,     /* x87 operations */
      Ain_A87LdCW,     /* load x87 control word */
      Ain_A87StSW,     /* store x87 status word */
//..
//..       Xin_FpUnary,   /* FP fake unary op */
//..       Xin_FpBinary,  /* FP fake binary op */
//..       Xin_FpLdSt,    /* FP fake load/store */
//..       Xin_FpLdStI,   /* FP fake load/store, converting to/from Int */
//..       Xin_Fp64to32,  /* FP round IEEE754 double to IEEE754 single */
//..       Xin_FpCMov,    /* FP fake floating point conditional move */
      Ain_LdMXCSR,     /* load %mxcsr */
//..       Xin_FpStSW_AX, /* fstsw %ax */
      Ain_SseUComIS,   /* ucomisd/ucomiss, then get %rflags into int
                          register */
      Ain_SseSI2SF,    /* scalar 32/64 int to 32/64 float conversion */
      Ain_SseSF2SI,    /* scalar 32/64 float to 32/64 int conversion */
      Ain_SseSDSS,     /* scalar float32 to/from float64 */
//..
//..       Xin_SseConst,  /* Generate restricted SSE literal */
      Ain_SseLdSt,     /* SSE load/store 32/64/128 bits, no alignment
                          constraints, upper 96/64/0 bits arbitrary */
      Ain_SseLdzLO,    /* SSE load low 32/64 bits, zero remainder of reg */
      Ain_Sse32Fx4,    /* SSE binary, 32Fx4 */
      Ain_Sse32FLo,    /* SSE binary, 32F in lowest lane only */
      Ain_Sse64Fx2,    /* SSE binary, 64Fx2 */
      Ain_Sse64FLo,    /* SSE binary, 64F in lowest lane only */
      Ain_SseReRg,     /* SSE binary general reg-reg, Re, Rg */
      Ain_SseCMov,     /* SSE conditional move */
      Ain_SseShuf      /* SSE2 shuffle (pshufd) */
   }
   AMD64InstrTag;

/* Destinations are on the RIGHT (second operand) */

typedef
   struct {
      AMD64InstrTag tag;
      union {
         struct {
            ULong imm64;
            HReg  dst;
         } Imm64;
         struct {
            AMD64AluOp op;
            AMD64RMI*  src;
            HReg       dst;
         } Alu64R;
         struct {
            AMD64AluOp  op;
            AMD64RI*    src;
            AMD64AMode* dst;
         } Alu64M;
         struct {
            AMD64ShiftOp op;
            UInt         src;  /* shift amount, or 0 means %cl */
            HReg         dst;
         } Sh64;
         struct {
            UInt   imm32;
            HReg   dst;
         } Test64;
         /* Not and Neg */
         struct {
            AMD64UnaryOp op;
            HReg         dst;
         } Unary64;
         /* 64-bit compute EA into a reg */
         struct {
            AMD64AMode* am;
            HReg        dst;
         } Lea64;
         /* 32-bit add/sub/and/or/xor/cmp, dst=REG (a la Alu64R) */
         struct {
            AMD64AluOp op;
            AMD64RMI*  src;
            HReg       dst;
         } Alu32R;
         /* 64 x 64 -> 128 bit widening multiply: RDX:RAX = RAX *s/u
            r/m64 */
         struct {
            Bool     syned;
            AMD64RM* src;
         } MulL;
         /* amd64 div/idiv instruction.  Modifies RDX and RAX and
            reads src. */
         struct {
            Bool     syned;
            Int      sz; /* 4 or 8 only */
            AMD64RM* src;
         } Div;
//..          /* shld/shrd.  op may only be Xsh_SHL or Xsh_SHR */
//..          struct {
//..             X86ShiftOp op;
//..             UInt       amt;   /* shift amount, or 0 means %cl */
//..             HReg       src;
//..             HReg       dst;
//..          } Sh3232;
         struct {
            AMD64RMI* src;
         } Push;
         /* Pseudo-insn.  Call target (an absolute address), on given
            condition (which could be Acc_ALWAYS). */
         struct {
            AMD64CondCode cond;
            Addr64        target;
            Int           regparms; /* 0 .. 6 */
         } Call;
         /* Pseudo-insn.  Goto dst, on given condition (which could be
            Acc_ALWAYS). */
         struct {
            IRJumpKind    jk;
            AMD64CondCode cond;
            AMD64RI*      dst;
         } Goto;
         /* Mov src to dst on the given condition, which may not
            be the bogus Acc_ALWAYS. */
         struct {
            AMD64CondCode cond;
            AMD64RM*      src;
            HReg          dst;
         } CMov64;
         /* reg-reg move, sx-ing/zx-ing top half */
         struct {
            Bool syned;
            HReg src;
            HReg dst;
         } MovxLQ;
         /* Sign/Zero extending loads.  Dst size is always 64 bits. */
         struct {
            UChar       szSmall; /* only 1, 2 or 4 */
            Bool        syned;
            AMD64AMode* src;
            HReg        dst;
         } LoadEX;
         /* 32/16/8 bit stores. */
         struct {
            UChar       sz; /* only 1, 2 or 4 */
            HReg        src;
            AMD64AMode* dst;
         } Store;
         /* Convert an amd64 condition code to a 64-bit value (0 or 1). */
         struct {
            AMD64CondCode cond;
            HReg          dst;
         } Set64;
         /* 64-bit bsf or bsr. */
         struct {
            Bool isFwds;
            HReg src;
            HReg dst;
         } Bsfr64;
         /* Mem fence.  In short, an insn which flushes all preceding
            loads and stores as much as possible before continuing.
            On AMD64 we emit a real "mfence". */
         struct {
         } MFence;
         struct {
            AMD64AMode* addr;
            UChar       sz; /* 1, 2, 4 or 8 */
         } ACAS;
         struct {
            AMD64AMode* addr;
            UChar       sz; /* 4 or 8 only */
         } DACAS;

         /* --- X87 --- */

         /* A very minimal set of x87 insns, that operate exactly in a
            stack-like way so no need to think about x87 registers. */

         /* Do 'ffree' on %st(7) .. %st(7-nregs) */
         struct {
            Int nregs; /* 1 <= nregs <= 7 */
         } A87Free;

         /* Push a 32- or 64-bit FP value from memory onto the stack,
            or move a value from the stack to memory and remove it
            from the stack. */
         struct {
            AMD64AMode* addr;
            Bool        isPush;
            UChar       szB; /* 4 or 8 */
         } A87PushPop;

         /* Do an operation on the top-of-stack.  This can be unary, in
            which case it is %st0 = OP( %st0 ), or binary: %st0 = OP(
            %st0, %st1 ). */
         struct {
            A87FpOp op;
         } A87FpOp;

         /* Load the FPU control word. */
         struct {
            AMD64AMode* addr;
         } A87LdCW;

         /* Store the FPU status word (fstsw m16) */
         struct {
            AMD64AMode* addr;
         } A87StSW;

         /* --- SSE --- */

         /* Load 32 bits into %mxcsr. */
         struct {
            AMD64AMode* addr;
         }
         LdMXCSR;
//..          /* fstsw %ax */
//..          struct {
//..             /* no fields */
//..          }
//..          FpStSW_AX;
         /* ucomisd/ucomiss, then get %rflags into int register */
         struct {
            UChar   sz;   /* 4 or 8 only */
            HReg    srcL; /* xmm */
            HReg    srcR; /* xmm */
            HReg    dst;  /* int */
         } SseUComIS;
         /* scalar 32/64 int to 32/64 float conversion */
         struct {
            UChar szS; /* 4 or 8 */
            UChar szD; /* 4 or 8 */
            HReg  src; /* i class */
            HReg  dst; /* v class */
         } SseSI2SF;
         /* scalar 32/64 float to 32/64 int conversion */
         struct {
            UChar szS; /* 4 or 8 */
            UChar szD; /* 4 or 8 */
            HReg  src; /* v class */
            HReg  dst; /* i class */
         } SseSF2SI;
         /* scalar float32 to/from float64 */
         struct {
            Bool from64; /* True: 64->32; False: 32->64 */
            HReg src;
            HReg dst;
         } SseSDSS;
//..
//..          /* Simplistic SSE[123] */
//..          struct {
//..             UShort  con;
//..             HReg    dst;
//..          } SseConst;
         struct {
            Bool        isLoad;
            UChar       sz; /* 4, 8 or 16 only */
            HReg        reg;
            AMD64AMode* addr;
         } SseLdSt;
         struct {
            Int         sz; /* 4 or 8 only */
            HReg        reg;
            AMD64AMode* addr;
         } SseLdzLO;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse32Fx4;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse32FLo;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse64Fx2;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } Sse64FLo;
         struct {
            AMD64SseOp op;
            HReg       src;
            HReg       dst;
         } SseReRg;
         /* Mov src to dst on the given condition, which may not
            be the bogus Acc_ALWAYS. */
         struct {
            AMD64CondCode cond;
            HReg          src;
            HReg          dst;
         } SseCMov;
         struct {
            Int    order; /* 0 <= order <= 0xFF */
            HReg   src;
            HReg   dst;
         } SseShuf;

      } Ain;
   }
   AMD64Instr;

extern AMD64Instr* AMD64Instr_Imm64      ( ULong imm64, HReg dst );
extern AMD64Instr* AMD64Instr_Alu64R     ( AMD64AluOp, AMD64RMI*, HReg );
extern AMD64Instr* AMD64Instr_Alu64M     ( AMD64AluOp, AMD64RI*,  AMD64AMode* );
extern AMD64Instr* AMD64Instr_Unary64    ( AMD64UnaryOp op, HReg dst );
extern AMD64Instr* AMD64Instr_Lea64      ( AMD64AMode* am, HReg dst );
extern AMD64Instr* AMD64Instr_Alu32R     ( AMD64AluOp, AMD64RMI*, HReg );
extern AMD64Instr* AMD64Instr_Sh64       ( AMD64ShiftOp, UInt, HReg );
extern AMD64Instr* AMD64Instr_Test64     ( UInt imm32, HReg dst );
extern AMD64Instr* AMD64Instr_MulL       ( Bool syned, AMD64RM* );
extern AMD64Instr* AMD64Instr_Div        ( Bool syned, Int sz, AMD64RM* );
//.. extern AMD64Instr* AMD64Instr_Sh3232    ( AMD64ShiftOp, UInt amt, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_Push       ( AMD64RMI* );
extern AMD64Instr* AMD64Instr_Call       ( AMD64CondCode, Addr64, Int );
extern AMD64Instr* AMD64Instr_Goto       ( IRJumpKind, AMD64CondCode cond, AMD64RI* dst );
extern AMD64Instr* AMD64Instr_CMov64     ( AMD64CondCode, AMD64RM* src, HReg dst );
extern AMD64Instr* AMD64Instr_MovxLQ     ( Bool syned, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_LoadEX     ( UChar szSmall, Bool syned,
                                           AMD64AMode* src, HReg dst );
extern AMD64Instr* AMD64Instr_Store      ( UChar sz, HReg src, AMD64AMode* dst );
extern AMD64Instr* AMD64Instr_Set64      ( AMD64CondCode cond, HReg dst );
extern AMD64Instr* AMD64Instr_Bsfr64     ( Bool isFwds, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_MFence     ( void );
extern AMD64Instr* AMD64Instr_ACAS       ( AMD64AMode* addr, UChar sz );
extern AMD64Instr* AMD64Instr_DACAS      ( AMD64AMode* addr, UChar sz );

extern AMD64Instr* AMD64Instr_A87Free    ( Int nregs );
extern AMD64Instr* AMD64Instr_A87PushPop ( AMD64AMode* addr, Bool isPush, UChar szB );
extern AMD64Instr* AMD64Instr_A87FpOp    ( A87FpOp op );
extern AMD64Instr* AMD64Instr_A87LdCW    ( AMD64AMode* addr );
extern AMD64Instr* AMD64Instr_A87StSW    ( AMD64AMode* addr );
//..
//.. extern AMD64Instr* AMD64Instr_FpUnary   ( AMD64FpOp op, HReg src, HReg dst );
//.. extern AMD64Instr* AMD64Instr_FpBinary  ( AMD64FpOp op, HReg srcL, HReg srcR, HReg dst );
//.. extern AMD64Instr* AMD64Instr_FpLdSt    ( Bool isLoad, UChar sz, HReg reg, AMD64AMode* );
//.. extern AMD64Instr* AMD64Instr_FpLdStI   ( Bool isLoad, UChar sz, HReg reg, AMD64AMode* );
//.. extern AMD64Instr* AMD64Instr_Fp64to32  ( HReg src, HReg dst );
//.. extern AMD64Instr* AMD64Instr_FpCMov    ( AMD64CondCode, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_LdMXCSR    ( AMD64AMode* );
//.. extern AMD64Instr* AMD64Instr_FpStSW_AX ( void );
extern AMD64Instr* AMD64Instr_SseUComIS  ( Int sz, HReg srcL, HReg srcR, HReg dst );
extern AMD64Instr* AMD64Instr_SseSI2SF   ( Int szS, Int szD, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_SseSF2SI   ( Int szS, Int szD, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_SseSDSS    ( Bool from64, HReg src, HReg dst );
//..
//.. extern AMD64Instr* AMD64Instr_SseConst  ( UShort con, HReg dst );
extern AMD64Instr* AMD64Instr_SseLdSt    ( Bool isLoad, Int sz, HReg, AMD64AMode* );
extern AMD64Instr* AMD64Instr_SseLdzLO   ( Int sz, HReg, AMD64AMode* );
extern AMD64Instr* AMD64Instr_Sse32Fx4   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_Sse32FLo   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_Sse64Fx2   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_Sse64FLo   ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_SseReRg    ( AMD64SseOp, HReg, HReg );
extern AMD64Instr* AMD64Instr_SseCMov    ( AMD64CondCode, HReg src, HReg dst );
extern AMD64Instr* AMD64Instr_SseShuf    ( Int order, HReg src, HReg dst );
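
/* Worked example (a sketch only, not code from this file; it assumes
   the usual VEX instruction selection environment: True/False from
   libvex_basictypes.h, addHInstr/HInstrArray from host_generic_regs.h,
   and a hypothetical HInstrArray* 'code' being filled in).
   Destinations go on the right, so "%rdi = %rsi + 42" becomes:

      addHInstr(code, (HInstr*)AMD64Instr_Alu64R(
                         Aalu_MOV, AMD64RMI_Reg(hregAMD64_RSI()),
                         hregAMD64_RDI()));
      addHInstr(code, (HInstr*)AMD64Instr_Alu64R(
                         Aalu_ADD, AMD64RMI_Imm(42),
                         hregAMD64_RDI()));

   An x87 unary op on a 64-bit value held at amode 'addr' follows the
   stack discipline described above: free a slot, push the operand,
   operate on %st0, then pop the result back to memory:

      addHInstr(code, (HInstr*)AMD64Instr_A87Free(1));
      addHInstr(code, (HInstr*)AMD64Instr_A87PushPop(addr, True, 8));
      addHInstr(code, (HInstr*)AMD64Instr_A87FpOp(Afp_SIN));
      addHInstr(code, (HInstr*)AMD64Instr_A87PushPop(addr, False, 8));
*/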


extern void ppAMD64Instr ( AMD64Instr*, Bool );

/* Some functions that insulate the register allocator from details
   of the underlying instruction set. */
extern void         getRegUsage_AMD64Instr ( HRegUsage*, AMD64Instr*, Bool );
extern void         mapRegs_AMD64Instr     ( HRegRemap*, AMD64Instr*, Bool );
extern Bool         isMove_AMD64Instr      ( AMD64Instr*, HReg*, HReg* );
extern Int          emit_AMD64Instr        ( UChar* buf, Int nbuf, AMD64Instr*,
                                             Bool,
                                             void* dispatch_unassisted,
                                             void* dispatch_assisted );

extern void genSpill_AMD64  ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern void genReload_AMD64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );

extern void         getAllocableRegs_AMD64 ( Int*, HReg** );
extern HInstrArray* iselSB_AMD64           ( IRSB*, VexArch,
                                                    VexArchInfo*,
                                                    VexAbiInfo* );

#endif /* ndef __VEX_HOST_AMD64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                   host_amd64_defs.h ---*/
/*---------------------------------------------------------------*/
