1
2/*---------------------------------------------------------------*/
3/*--- begin                                 host_arm64_defs.h ---*/
4/*---------------------------------------------------------------*/
5
6/*
7   This file is part of Valgrind, a dynamic binary instrumentation
8   framework.
9
10   Copyright (C) 2013-2013 OpenWorks
11      info@open-works.net
12
13   This program is free software; you can redistribute it and/or
14   modify it under the terms of the GNU General Public License as
15   published by the Free Software Foundation; either version 2 of the
16   License, or (at your option) any later version.
17
18   This program is distributed in the hope that it will be useful, but
19   WITHOUT ANY WARRANTY; without even the implied warranty of
20   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
21   General Public License for more details.
22
23   You should have received a copy of the GNU General Public License
24   along with this program; if not, write to the Free Software
25   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
26   02110-1301, USA.
27
28   The GNU General Public License is contained in the file COPYING.
29*/
30
31#ifndef __VEX_HOST_ARM64_DEFS_H
32#define __VEX_HOST_ARM64_DEFS_H
33
34#include "libvex_basictypes.h"
35#include "libvex.h"                      // VexArch
36#include "host_generic_regs.h"           // HReg
37
38
39/* --------- Registers. --------- */
40
41#define ST_IN static inline
42ST_IN HReg hregARM64_X22 ( void ) { return mkHReg(False, HRcInt64,  22,  0); }
43ST_IN HReg hregARM64_X23 ( void ) { return mkHReg(False, HRcInt64,  23,  1); }
44ST_IN HReg hregARM64_X24 ( void ) { return mkHReg(False, HRcInt64,  24,  2); }
45ST_IN HReg hregARM64_X25 ( void ) { return mkHReg(False, HRcInt64,  25,  3); }
46ST_IN HReg hregARM64_X26 ( void ) { return mkHReg(False, HRcInt64,  26,  4); }
47ST_IN HReg hregARM64_X27 ( void ) { return mkHReg(False, HRcInt64,  27,  5); }
48ST_IN HReg hregARM64_X28 ( void ) { return mkHReg(False, HRcInt64,  28,  6); }
49
50ST_IN HReg hregARM64_X0  ( void ) { return mkHReg(False, HRcInt64,  0,   7); }
51ST_IN HReg hregARM64_X1  ( void ) { return mkHReg(False, HRcInt64,  1,   8); }
52ST_IN HReg hregARM64_X2  ( void ) { return mkHReg(False, HRcInt64,  2,   9); }
53ST_IN HReg hregARM64_X3  ( void ) { return mkHReg(False, HRcInt64,  3,  10); }
54ST_IN HReg hregARM64_X4  ( void ) { return mkHReg(False, HRcInt64,  4,  11); }
55ST_IN HReg hregARM64_X5  ( void ) { return mkHReg(False, HRcInt64,  5,  12); }
56ST_IN HReg hregARM64_X6  ( void ) { return mkHReg(False, HRcInt64,  6,  13); }
57ST_IN HReg hregARM64_X7  ( void ) { return mkHReg(False, HRcInt64,  7,  14); }
58
59ST_IN HReg hregARM64_Q16 ( void ) { return mkHReg(False, HRcVec128, 16, 15); }
60ST_IN HReg hregARM64_Q17 ( void ) { return mkHReg(False, HRcVec128, 17, 16); }
61ST_IN HReg hregARM64_Q18 ( void ) { return mkHReg(False, HRcVec128, 18, 17); }
62ST_IN HReg hregARM64_Q19 ( void ) { return mkHReg(False, HRcVec128, 19, 18); }
63ST_IN HReg hregARM64_Q20 ( void ) { return mkHReg(False, HRcVec128, 20, 19); }
64
65ST_IN HReg hregARM64_D8  ( void ) { return mkHReg(False, HRcFlt64,  8,  20); }
66ST_IN HReg hregARM64_D9  ( void ) { return mkHReg(False, HRcFlt64,  9,  21); }
67ST_IN HReg hregARM64_D10 ( void ) { return mkHReg(False, HRcFlt64,  10, 22); }
68ST_IN HReg hregARM64_D11 ( void ) { return mkHReg(False, HRcFlt64,  11, 23); }
69ST_IN HReg hregARM64_D12 ( void ) { return mkHReg(False, HRcFlt64,  12, 24); }
70ST_IN HReg hregARM64_D13 ( void ) { return mkHReg(False, HRcFlt64,  13, 25); }
71
72ST_IN HReg hregARM64_X8  ( void ) { return mkHReg(False, HRcInt64,  8,  26); }
73ST_IN HReg hregARM64_X9  ( void ) { return mkHReg(False, HRcInt64,  9,  27); }
74ST_IN HReg hregARM64_X21 ( void ) { return mkHReg(False, HRcInt64, 21,  28); }
75#undef ST_IN
76
77extern void ppHRegARM64 ( HReg );
78
79/* Number of registers used arg passing in function calls */
80#define ARM64_N_ARGREGS 8   /* x0 .. x7 */
81
82
83/* --------- Condition codes. --------- */
84
85typedef
86   enum {
87      ARM64cc_EQ  = 0,  /* equal                         : Z=1 */
88      ARM64cc_NE  = 1,  /* not equal                     : Z=0 */
89
90      ARM64cc_CS  = 2,  /* >=u (higher or same)          : C=1 */
91      ARM64cc_CC  = 3,  /* <u  (lower)                   : C=0 */
92
93      ARM64cc_MI  = 4,  /* minus (negative)              : N=1 */
94      ARM64cc_PL  = 5,  /* plus (zero or +ve)            : N=0 */
95
96      ARM64cc_VS  = 6,  /* overflow                      : V=1 */
97      ARM64cc_VC  = 7,  /* no overflow                   : V=0 */
98
99      ARM64cc_HI  = 8,  /* >u   (higher)                 :   C=1 && Z=0 */
100      ARM64cc_LS  = 9,  /* <=u  (lower or same)          : !(C=1 && Z=0) */
101
102      ARM64cc_GE  = 10, /* >=s (signed greater or equal) :   N=V */
103      ARM64cc_LT  = 11, /* <s  (signed less than)        : !(N=V) */
104
105      ARM64cc_GT  = 12, /* >s  (signed greater)          :   Z=0 && N=V */
106      ARM64cc_LE  = 13, /* <=s (signed less or equal)    : !(Z=0 && N=V) */
107
108      ARM64cc_AL  = 14, /* always (unconditional) */
109      ARM64cc_NV  = 15  /* in 64-bit mode also means "always" */
110   }
111   ARM64CondCode;
112
113
114/* --------- Memory address expressions (amodes). --------- */
115
116typedef
117   enum {
118      ARM64am_RI9=10, /* reg + simm9 */
119      ARM64am_RI12,   /* reg + uimm12 * szB (iow, scaled by access size) */
120      ARM64am_RR      /* reg1 + reg2 */
121   }
122   ARM64AModeTag;
123
124typedef
125   struct {
126      ARM64AModeTag tag;
127      union {
128         struct {
129            HReg reg;
130            Int  simm9; /* -256 .. +255 */
131         } RI9;
132         struct {
133            HReg  reg;
134            UInt  uimm12; /* 0 .. 4095 */
135            UChar szB;    /* 1, 2, 4, 8 (16 ?) */
136         } RI12;
137         struct {
138            HReg base;
139            HReg index;
140         } RR;
141      } ARM64am;
142   }
143   ARM64AMode;
144
145extern ARM64AMode* ARM64AMode_RI9  ( HReg reg, Int simm9 );
146extern ARM64AMode* ARM64AMode_RI12 ( HReg reg, Int uimm12, UChar szB );
147extern ARM64AMode* ARM64AMode_RR   ( HReg base, HReg index );
148
149
150/* --------- Reg or uimm12 or (uimm12 << 12) operands --------- */
151
152typedef
153   enum {
154      ARM64riA_I12=20, /* uimm12 << 0 or 12 only */
155      ARM64riA_R       /* reg */
156   }
157   ARM64RIATag;
158
159typedef
160   struct {
161      ARM64RIATag tag;
162      union {
163         struct {
164            UShort imm12;  /* 0 .. 4095 */
165            UChar  shift;  /* 0 or 12 only */
166         } I12;
167         struct {
168            HReg reg;
169         } R;
170      } ARM64riA;
171   }
172   ARM64RIA;
173
174extern ARM64RIA* ARM64RIA_I12 ( UShort imm12, UChar shift );
175extern ARM64RIA* ARM64RIA_R   ( HReg );
176
177
178/* --------- Reg or "bitfield" (logic immediate) operands --------- */
179
180typedef
181   enum {
182      ARM64riL_I13=6, /* wierd-o bitfield immediate, 13 bits in total */
183      ARM64riL_R      /* reg */
184   }
185   ARM64RILTag;
186
187typedef
188   struct {
189      ARM64RILTag tag;
190      union {
191         struct {
192            UChar bitN; /* 0 .. 1 */
193            UChar immR; /* 0 .. 63 */
194            UChar immS; /* 0 .. 63 */
195         } I13;
196         struct {
197            HReg reg;
198         } R;
199      } ARM64riL;
200   }
201   ARM64RIL;
202
203extern ARM64RIL* ARM64RIL_I13 ( UChar bitN, UChar immR, UChar immS );
204extern ARM64RIL* ARM64RIL_R   ( HReg );
205
206
207/* --------------- Reg or uimm6 operands --------------- */
208
209typedef
210   enum {
211      ARM64ri6_I6=30, /* uimm6, 1 .. 63 only */
212      ARM64ri6_R      /* reg */
213   }
214   ARM64RI6Tag;
215
216typedef
217   struct {
218      ARM64RI6Tag tag;
219      union {
220         struct {
221            UInt imm6;   /* 1 .. 63 */
222         } I6;
223         struct {
224            HReg reg;
225         } R;
226      } ARM64ri6;
227   }
228   ARM64RI6;
229
230extern ARM64RI6* ARM64RI6_I6 ( UInt imm6 );
231extern ARM64RI6* ARM64RI6_R  ( HReg );
232
233
234/* --------------------- Instructions --------------------- */
235
236typedef
237   enum {
238      ARM64lo_AND=40,
239      ARM64lo_OR,
240      ARM64lo_XOR
241   }
242   ARM64LogicOp;
243
244typedef
245   enum {
246      ARM64sh_SHL=50,
247      ARM64sh_SHR,
248      ARM64sh_SAR
249   }
250   ARM64ShiftOp;
251
252typedef
253   enum {
254      ARM64un_NEG=60,
255      ARM64un_NOT,
256      ARM64un_CLZ,
257   }
258   ARM64UnaryOp;
259
260typedef
261   enum {
262      ARM64mul_PLAIN=70, /* lo64(64 * 64)  */
263      ARM64mul_ZX,       /* hi64(64 *u 64) */
264      ARM64mul_SX        /* hi64(64 *s 64) */
265   }
266   ARM64MulOp;
267
268typedef
269   /* These characterise an integer-FP conversion, but don't imply any
270      particular direction. */
271   enum {
272      ARM64cvt_F32_I32S=80,
273      ARM64cvt_F64_I32S,
274      ARM64cvt_F32_I64S,
275      ARM64cvt_F64_I64S,
276      ARM64cvt_F32_I32U,
277      ARM64cvt_F64_I32U,
278      ARM64cvt_F32_I64U,
279      ARM64cvt_F64_I64U,
280      ARM64cvt_INVALID
281   }
282   ARM64CvtOp;
283
284typedef
285   enum {
286      ARM64fpb_ADD=100,
287      ARM64fpb_SUB,
288      ARM64fpb_MUL,
289      ARM64fpb_DIV,
290      ARM64fpb_INVALID
291   }
292   ARM64FpBinOp;
293
294typedef
295   enum {
296      ARM64fpu_NEG=110,
297      ARM64fpu_ABS,
298      ARM64fpu_SQRT,
299      ARM64fpu_RINT,
300      ARM64fpu_RECPX,
301      ARM64fpu_INVALID
302   }
303   ARM64FpUnaryOp;
304
305typedef
306   enum {
307      ARM64vecb_ADD64x2=120, ARM64vecb_ADD32x4,
308      ARM64vecb_ADD16x8,     ARM64vecb_ADD8x16,
309      ARM64vecb_SUB64x2,     ARM64vecb_SUB32x4,
310      ARM64vecb_SUB16x8,     ARM64vecb_SUB8x16,
311                             ARM64vecb_MUL32x4,
312      ARM64vecb_MUL16x8,     ARM64vecb_MUL8x16,
313      ARM64vecb_FADD64x2,    ARM64vecb_FADD32x4,
314      ARM64vecb_FSUB64x2,    ARM64vecb_FSUB32x4,
315      ARM64vecb_FMUL64x2,    ARM64vecb_FMUL32x4,
316      ARM64vecb_FDIV64x2,    ARM64vecb_FDIV32x4,
317      ARM64vecb_FMAX64x2,    ARM64vecb_FMAX32x4,
318      ARM64vecb_FMIN64x2,    ARM64vecb_FMIN32x4,
319                             ARM64vecb_UMAX32x4,
320      ARM64vecb_UMAX16x8,    ARM64vecb_UMAX8x16,
321                             ARM64vecb_UMIN32x4,
322      ARM64vecb_UMIN16x8,    ARM64vecb_UMIN8x16,
323                             ARM64vecb_SMAX32x4,
324      ARM64vecb_SMAX16x8,    ARM64vecb_SMAX8x16,
325                             ARM64vecb_SMIN32x4,
326      ARM64vecb_SMIN16x8,    ARM64vecb_SMIN8x16,
327      ARM64vecb_AND,
328      ARM64vecb_ORR,
329      ARM64vecb_XOR,
330      ARM64vecb_CMEQ64x2,    ARM64vecb_CMEQ32x4,
331      ARM64vecb_CMEQ16x8,    ARM64vecb_CMEQ8x16,
332      ARM64vecb_CMHI64x2,    ARM64vecb_CMHI32x4, /* >u */
333      ARM64vecb_CMHI16x8,    ARM64vecb_CMHI8x16,
334      ARM64vecb_CMGT64x2,    ARM64vecb_CMGT32x4, /* >s */
335      ARM64vecb_CMGT16x8,    ARM64vecb_CMGT8x16,
336      ARM64vecb_FCMEQ64x2,   ARM64vecb_FCMEQ32x4,
337      ARM64vecb_FCMGE64x2,   ARM64vecb_FCMGE32x4,
338      ARM64vecb_FCMGT64x2,   ARM64vecb_FCMGT32x4,
339      ARM64vecb_TBL1,
340      ARM64vecb_UZP164x2,    ARM64vecb_UZP132x4,
341      ARM64vecb_UZP116x8,    ARM64vecb_UZP18x16,
342      ARM64vecb_UZP264x2,    ARM64vecb_UZP232x4,
343      ARM64vecb_UZP216x8,    ARM64vecb_UZP28x16,
344      ARM64vecb_ZIP132x4,    ARM64vecb_ZIP116x8,
345      ARM64vecb_ZIP18x16,    ARM64vecb_ZIP232x4,
346      ARM64vecb_ZIP216x8,    ARM64vecb_ZIP28x16,
347                             ARM64vecb_PMUL8x16,
348                             ARM64vecb_PMULL8x8,
349                             ARM64vecb_UMULL2DSS,
350      ARM64vecb_UMULL4SHH,   ARM64vecb_UMULL8HBB,
351                             ARM64vecb_SMULL2DSS,
352      ARM64vecb_SMULL4SHH,   ARM64vecb_SMULL8HBB,
353      ARM64vecb_SQADD64x2,   ARM64vecb_SQADD32x4,
354      ARM64vecb_SQADD16x8,   ARM64vecb_SQADD8x16,
355      ARM64vecb_UQADD64x2,   ARM64vecb_UQADD32x4,
356      ARM64vecb_UQADD16x8,   ARM64vecb_UQADD8x16,
357      ARM64vecb_SQSUB64x2,   ARM64vecb_SQSUB32x4,
358      ARM64vecb_SQSUB16x8,   ARM64vecb_SQSUB8x16,
359      ARM64vecb_UQSUB64x2,   ARM64vecb_UQSUB32x4,
360      ARM64vecb_UQSUB16x8,   ARM64vecb_UQSUB8x16,
361                             ARM64vecb_SQDMULL2DSS,
362      ARM64vecb_SQDMULL4SHH,
363                             ARM64vecb_SQDMULH32x4,
364      ARM64vecb_SQDMULH16x8,
365                             ARM64vecb_SQRDMULH32x4,
366      ARM64vecb_SQRDMULH16x8,
367      ARM64vecb_SQSHL64x2,   ARM64vecb_SQSHL32x4,
368      ARM64vecb_SQSHL16x8,   ARM64vecb_SQSHL8x16,
369      ARM64vecb_UQSHL64x2,   ARM64vecb_UQSHL32x4,
370      ARM64vecb_UQSHL16x8,   ARM64vecb_UQSHL8x16,
371      ARM64vecb_SQRSHL64x2,  ARM64vecb_SQRSHL32x4,
372      ARM64vecb_SQRSHL16x8,  ARM64vecb_SQRSHL8x16,
373      ARM64vecb_UQRSHL64x2,  ARM64vecb_UQRSHL32x4,
374      ARM64vecb_UQRSHL16x8,  ARM64vecb_UQRSHL8x16,
375      ARM64vecb_SSHL64x2,    ARM64vecb_SSHL32x4,
376      ARM64vecb_SSHL16x8,    ARM64vecb_SSHL8x16,
377      ARM64vecb_USHL64x2,    ARM64vecb_USHL32x4,
378      ARM64vecb_USHL16x8,    ARM64vecb_USHL8x16,
379      ARM64vecb_SRSHL64x2,   ARM64vecb_SRSHL32x4,
380      ARM64vecb_SRSHL16x8,   ARM64vecb_SRSHL8x16,
381      ARM64vecb_URSHL64x2,   ARM64vecb_URSHL32x4,
382      ARM64vecb_URSHL16x8,   ARM64vecb_URSHL8x16,
383      ARM64vecb_FRECPS64x2,  ARM64vecb_FRECPS32x4,
384      ARM64vecb_FRSQRTS64x2, ARM64vecb_FRSQRTS32x4,
385      ARM64vecb_INVALID
386   }
387   ARM64VecBinOp;
388
389typedef
390   enum {
391      ARM64vecmo_SUQADD64x2=300, ARM64vecmo_SUQADD32x4,
392      ARM64vecmo_SUQADD16x8,     ARM64vecmo_SUQADD8x16,
393      ARM64vecmo_USQADD64x2,     ARM64vecmo_USQADD32x4,
394      ARM64vecmo_USQADD16x8,     ARM64vecmo_USQADD8x16,
395      ARM64vecmo_INVALID
396   }
397   ARM64VecModifyOp;
398
399typedef
400   enum {
401      ARM64vecu_FNEG64x2=350, ARM64vecu_FNEG32x4,
402      ARM64vecu_FABS64x2,     ARM64vecu_FABS32x4,
403      ARM64vecu_NOT,
404      ARM64vecu_ABS64x2,      ARM64vecu_ABS32x4,
405      ARM64vecu_ABS16x8,      ARM64vecu_ABS8x16,
406      ARM64vecu_CLS32x4,      ARM64vecu_CLS16x8,      ARM64vecu_CLS8x16,
407      ARM64vecu_CLZ32x4,      ARM64vecu_CLZ16x8,      ARM64vecu_CLZ8x16,
408      ARM64vecu_CNT8x16,
409      ARM64vecu_RBIT,
410      ARM64vecu_REV1616B,
411      ARM64vecu_REV3216B,     ARM64vecu_REV328H,
412      ARM64vecu_REV6416B,     ARM64vecu_REV648H,      ARM64vecu_REV644S,
413      ARM64vecu_URECPE32x4,
414      ARM64vecu_URSQRTE32x4,
415      ARM64vecu_FRECPE64x2,   ARM64vecu_FRECPE32x4,
416      ARM64vecu_FRSQRTE64x2,  ARM64vecu_FRSQRTE32x4,
417      ARM64vecu_FSQRT64x2,    ARM64vecu_FSQRT32x4,
418      ARM64vecu_INVALID
419   }
420   ARM64VecUnaryOp;
421
422typedef
423   enum {
424      ARM64vecshi_USHR64x2=400, ARM64vecshi_USHR32x4,
425      ARM64vecshi_USHR16x8,     ARM64vecshi_USHR8x16,
426      ARM64vecshi_SSHR64x2,     ARM64vecshi_SSHR32x4,
427      ARM64vecshi_SSHR16x8,     ARM64vecshi_SSHR8x16,
428      ARM64vecshi_SHL64x2,      ARM64vecshi_SHL32x4,
429      ARM64vecshi_SHL16x8,      ARM64vecshi_SHL8x16,
430      /* These narrowing shifts zero out the top half of the destination
431         register. */
432      ARM64vecshi_SQSHRN2SD,    ARM64vecshi_SQSHRN4HS,   ARM64vecshi_SQSHRN8BH,
433      ARM64vecshi_UQSHRN2SD,    ARM64vecshi_UQSHRN4HS,   ARM64vecshi_UQSHRN8BH,
434      ARM64vecshi_SQSHRUN2SD,   ARM64vecshi_SQSHRUN4HS,  ARM64vecshi_SQSHRUN8BH,
435      ARM64vecshi_SQRSHRN2SD,   ARM64vecshi_SQRSHRN4HS,  ARM64vecshi_SQRSHRN8BH,
436      ARM64vecshi_UQRSHRN2SD,   ARM64vecshi_UQRSHRN4HS,  ARM64vecshi_UQRSHRN8BH,
437      ARM64vecshi_SQRSHRUN2SD,  ARM64vecshi_SQRSHRUN4HS, ARM64vecshi_SQRSHRUN8BH,
438      /* Saturating left shifts, of various flavours. */
439      ARM64vecshi_UQSHL64x2,    ARM64vecshi_UQSHL32x4,
440      ARM64vecshi_UQSHL16x8,    ARM64vecshi_UQSHL8x16,
441      ARM64vecshi_SQSHL64x2,    ARM64vecshi_SQSHL32x4,
442      ARM64vecshi_SQSHL16x8,    ARM64vecshi_SQSHL8x16,
443      ARM64vecshi_SQSHLU64x2,   ARM64vecshi_SQSHLU32x4,
444      ARM64vecshi_SQSHLU16x8,   ARM64vecshi_SQSHLU8x16,
445      ARM64vecshi_INVALID
446   }
447   ARM64VecShiftImmOp;
448
449typedef
450   enum {
451      ARM64vecna_XTN=450,
452      ARM64vecna_SQXTN,
453      ARM64vecna_UQXTN,
454      ARM64vecna_SQXTUN,
455      ARM64vecna_INVALID
456   }
457   ARM64VecNarrowOp;
458
459typedef
460   enum {
461      /* baseline */
462      ARM64in_Arith=1220,
463      ARM64in_Cmp,
464      ARM64in_Logic,
465      ARM64in_Test,
466      ARM64in_Shift,
467      ARM64in_Unary,
468      ARM64in_MovI,        /* int reg-reg move */
469      ARM64in_Imm64,
470      ARM64in_LdSt64,
471      ARM64in_LdSt32,      /* w/ ZX loads */
472      ARM64in_LdSt16,      /* w/ ZX loads */
473      ARM64in_LdSt8,       /* w/ ZX loads */
474      ARM64in_XDirect,     /* direct transfer to GA */
475      ARM64in_XIndir,      /* indirect transfer to GA */
476      ARM64in_XAssisted,   /* assisted transfer to GA */
477      ARM64in_CSel,
478      ARM64in_Call,
479      ARM64in_AddToSP,     /* move SP by small, signed constant */
480      ARM64in_FromSP,      /* move SP to integer register */
481      ARM64in_Mul,
482      ARM64in_LdrEX,
483      ARM64in_StrEX,
484      ARM64in_MFence,
485      /* ARM64in_V*: scalar ops involving vector registers */
486      ARM64in_VLdStH,   /* ld/st to/from low 16 bits of vec reg, imm offset */
487      ARM64in_VLdStS,   /* ld/st to/from low 32 bits of vec reg, imm offset */
488      ARM64in_VLdStD,   /* ld/st to/from low 64 bits of vec reg, imm offset */
489      ARM64in_VLdStQ,   /* ld/st to/from all 128 bits of vec reg, no offset */
490      ARM64in_VCvtI2F,
491      ARM64in_VCvtF2I,
492      ARM64in_VCvtSD,   /* scalar 32 bit FP <--> 64 bit FP */
493      ARM64in_VCvtHS,   /* scalar 16 bit FP <--> 32 bit FP */
494      ARM64in_VCvtHD,   /* scalar 16 bit FP <--> 64 bit FP */
495      ARM64in_VUnaryD,
496      ARM64in_VUnaryS,
497      ARM64in_VBinD,
498      ARM64in_VBinS,
499      ARM64in_VCmpD,
500      ARM64in_VCmpS,
501      ARM64in_VFCSel,
502      ARM64in_FPCR,
503      ARM64in_FPSR,
504      /* ARM64in_V*V: vector ops on vector registers */
505      ARM64in_VBinV,
506      ARM64in_VModifyV,
507      ARM64in_VUnaryV,
508      ARM64in_VNarrowV,
509      ARM64in_VShiftImmV,
510      ARM64in_VExtV,
511      ARM64in_VImmQ,
512      ARM64in_VDfromX,    /* Move an Xreg to a Dreg */
513      ARM64in_VQfromX,    /* Move an Xreg to a Qreg lo64, and zero hi64 */
514      ARM64in_VQfromXX,   /* Move 2 Xregs to a Qreg */
515      ARM64in_VXfromQ,    /* Move half a Qreg to an Xreg */
516      ARM64in_VXfromDorS, /* Move Dreg or Sreg(ZX) to an Xreg */
517      ARM64in_VMov,       /* vector reg-reg move, 16, 8 or 4 bytes */
518      /* infrastructure */
519      ARM64in_EvCheck,    /* Event check */
520      ARM64in_ProfInc     /* 64-bit profile counter increment */
521   }
522   ARM64InstrTag;
523
524/* Destinations are on the LEFT (first operand) */
525
526typedef
527   struct {
528      ARM64InstrTag tag;
529      union {
530         /* --- INTEGER INSTRUCTIONS --- */
531         /* 64 bit ADD/SUB reg, reg or uimm12<<{0,12} */
532         struct {
533            HReg      dst;
534            HReg      argL;
535            ARM64RIA* argR;
536            Bool      isAdd;
537         } Arith;
538         /* 64 or 32 bit CMP reg, reg or aimm (SUB and set flags) */
539         struct {
540            HReg      argL;
541            ARM64RIA* argR;
542            Bool      is64;
543         } Cmp;
544         /* 64 bit AND/OR/XOR reg, reg or bitfield-immediate */
545         struct {
546            HReg         dst;
547            HReg         argL;
548            ARM64RIL*    argR;
549            ARM64LogicOp op;
550         } Logic;
551         /* 64 bit TST reg, reg or bimm (AND and set flags) */
552         struct {
553            HReg      argL;
554            ARM64RIL* argR;
555         } Test;
556         /* 64 bit SHL/SHR/SAR, 2nd arg is reg or imm */
557         struct {
558            HReg         dst;
559            HReg         argL;
560            ARM64RI6*    argR;
561            ARM64ShiftOp op;
562         } Shift;
563         /* NOT/NEG/CLZ, 64 bit only */
564         struct {
565            HReg         dst;
566            HReg         src;
567            ARM64UnaryOp op;
568         } Unary;
569         /* MOV dst, src -- reg-reg move for integer registers */
570         struct {
571            HReg dst;
572            HReg src;
573         } MovI;
574         /* Pseudo-insn; make a 64-bit immediate */
575         struct {
576            HReg  dst;
577            ULong imm64;
578         } Imm64;
579         /* 64-bit load or store */
580         struct {
581            Bool        isLoad;
582            HReg        rD;
583            ARM64AMode* amode;
584         } LdSt64;
585         /* zx-32-to-64-bit load, or 32-bit store */
586         struct {
587            Bool        isLoad;
588            HReg        rD;
589            ARM64AMode* amode;
590         } LdSt32;
591         /* zx-16-to-64-bit load, or 16-bit store */
592         struct {
593            Bool        isLoad;
594            HReg        rD;
595            ARM64AMode* amode;
596         } LdSt16;
597         /* zx-8-to-64-bit load, or 8-bit store */
598         struct {
599            Bool        isLoad;
600            HReg        rD;
601            ARM64AMode* amode;
602         } LdSt8;
603         /* Update the guest PC value, then exit requesting to chain
604            to it.  May be conditional.  Urr, use of Addr64 implicitly
605            assumes that wordsize(guest) == wordsize(host). */
606         struct {
607            Addr64        dstGA;    /* next guest address */
608            ARM64AMode*   amPC;     /* amode in guest state for PC */
609            ARM64CondCode cond;     /* can be ARM64cc_AL */
610            Bool          toFastEP; /* chain to the slow or fast point? */
611         } XDirect;
612         /* Boring transfer to a guest address not known at JIT time.
613            Not chainable.  May be conditional. */
614         struct {
615            HReg          dstGA;
616            ARM64AMode*   amPC;
617            ARM64CondCode cond; /* can be ARM64cc_AL */
618         } XIndir;
619         /* Assisted transfer to a guest address, most general case.
620            Not chainable.  May be conditional. */
621         struct {
622            HReg          dstGA;
623            ARM64AMode*   amPC;
624            ARM64CondCode cond; /* can be ARM64cc_AL */
625            IRJumpKind    jk;
626         } XAssisted;
627         /* CSEL: dst = if cond then argL else argR.  cond may be anything. */
628          struct {
629            HReg          dst;
630            HReg          argL;
631            HReg          argR;
632            ARM64CondCode cond;
633         } CSel;
634         /* Pseudo-insn.  Call target (an absolute address), on given
635            condition (which could be ARM64cc_AL). */
636         struct {
637            RetLoc        rloc;     /* where the return value will be */
638            Addr64        target;
639            ARM64CondCode cond;
640            Int           nArgRegs; /* # regs carrying args: 0 .. 8 */
641         } Call;
642         /* move SP by small, signed constant */
643         struct {
644            Int simm; /* needs to be 0 % 16 and in the range -4095
645                         .. 4095 inclusive */
646         } AddToSP;
647         /* move SP to integer register */
648         struct {
649            HReg dst;
650         } FromSP;
651         /* Integer multiply, with 3 variants:
652              (PLAIN) lo64(64 *  64)
653              (ZX)    hi64(64 *u 64)
654              (SX)    hi64(64 *s 64)
655         */
656         struct {
657            HReg       dst;
658            HReg       argL;
659            HReg       argR;
660            ARM64MulOp op;
661         } Mul;
662         /* LDXR{,H,B} x2, [x4] */
663         struct {
664            Int  szB; /* 1, 2, 4 or 8 */
665         } LdrEX;
666         /* STXR{,H,B} w0, x2, [x4] */
667         struct {
668            Int  szB; /* 1, 2, 4 or 8 */
669         } StrEX;
670         /* Mem fence.  An insn which fences all loads and stores as
671            much as possible before continuing.  On ARM64 we emit the
672            sequence "dsb sy ; dmb sy ; isb sy", which is probably
673            total nuclear overkill, but better safe than sorry. */
674         struct {
675         } MFence;
676         /* --- INSTRUCTIONS INVOLVING VECTOR REGISTERS --- */
677         /* ld/st to/from low 16 bits of vec reg, imm offset */
678         struct {
679            Bool isLoad;
680            HReg hD;
681            HReg rN;
682            UInt uimm12;  /* 0 .. 8190 inclusive, 0 % 2 */
683         } VLdStH;
684         /* ld/st to/from low 32 bits of vec reg, imm offset */
685         struct {
686            Bool isLoad;
687            HReg sD;
688            HReg rN;
689            UInt uimm12;  /* 0 .. 16380 inclusive, 0 % 4 */
690         } VLdStS;
691         /* ld/st to/from low 64 bits of vec reg, imm offset */
692         struct {
693            Bool isLoad;
694            HReg dD;
695            HReg rN;
696            UInt uimm12;  /* 0 .. 32760 inclusive, 0 % 8 */
697         } VLdStD;
698         /* ld/st to/from all 128 bits of vec reg, no offset */
699         struct {
700            Bool isLoad;
701            HReg rQ; // data
702            HReg rN; // address
703         } VLdStQ;
704         /* Scalar conversion of int to float. */
705         struct {
706            ARM64CvtOp how;
707            HReg       rD; // dst, a D or S register
708            HReg       rS; // src, a W or X register
709         } VCvtI2F;
710         /* Scalar conversion of float to int, w/ specified RM. */
711         struct {
712            ARM64CvtOp how;
713            HReg       rD; // dst, a W or X register
714            HReg       rS; // src, a D or S register
715            UChar      armRM; // ARM encoded RM:
716                              // 00=nearest, 01=+inf, 10=-inf, 11=zero
717         } VCvtF2I;
718         /* Convert between 32-bit and 64-bit FP values (both ways). (FCVT) */
719         struct {
720            Bool sToD; /* True: F32->F64.  False: F64->F32 */
721            HReg dst;
722            HReg src;
723         } VCvtSD;
724         /* Convert between 16-bit and 32-bit FP values (both ways). (FCVT) */
725         struct {
726            Bool hToS; /* True: F16->F32.  False: F32->F16 */
727            HReg dst;
728            HReg src;
729         } VCvtHS;
730         /* Convert between 16-bit and 64-bit FP values (both ways). (FCVT) */
731         struct {
732            Bool hToD; /* True: F16->F64.  False: F64->F16 */
733            HReg dst;
734            HReg src;
735         } VCvtHD;
736         /* 64-bit FP unary */
737         struct {
738            ARM64FpUnaryOp op;
739            HReg           dst;
740            HReg           src;
741         } VUnaryD;
742         /* 32-bit FP unary */
743         struct {
744            ARM64FpUnaryOp op;
745            HReg           dst;
746            HReg           src;
747         } VUnaryS;
748         /* 64-bit FP binary arithmetic */
749         struct {
750            ARM64FpBinOp op;
751            HReg         dst;
752            HReg         argL;
753            HReg         argR;
754         } VBinD;
755         /* 32-bit FP binary arithmetic */
756         struct {
757            ARM64FpBinOp op;
758            HReg         dst;
759            HReg         argL;
760            HReg         argR;
761         } VBinS;
762         /* 64-bit FP compare */
763         struct {
764            HReg argL;
765            HReg argR;
766         } VCmpD;
767         /* 32-bit FP compare */
768         struct {
769            HReg argL;
770            HReg argR;
771         } VCmpS;
772         /* 32- or 64-bit FP conditional select */
773         struct {
774            HReg          dst;
775            HReg          argL;
776            HReg          argR;
777            ARM64CondCode cond;
778            Bool          isD;
779         }
780         VFCSel;
781         /* Move a 32-bit value to/from the FPCR */
782         struct {
783            Bool toFPCR;
784            HReg iReg;
785         } FPCR;
786         /* Move a 32-bit value to/from the FPSR */
787         struct {
788            Bool toFPSR;
789            HReg iReg;
790         } FPSR;
791         /* binary vector operation on vector registers */
792         struct {
793            ARM64VecBinOp op;
794            HReg          dst;
795            HReg          argL;
796            HReg          argR;
797         } VBinV;
798         /* binary vector operation on vector registers.
799            Dst reg is also a src. */
800         struct {
801            ARM64VecModifyOp op;
802            HReg             mod;
803            HReg             arg;
804         } VModifyV;
805         /* unary vector operation on vector registers */
806         struct {
807            ARM64VecUnaryOp op;
808            HReg            dst;
809            HReg            arg;
810         } VUnaryV;
811         /* vector narrowing, Q -> Q.  Result goes in the bottom half
812            of dst and the top half is zeroed out.  Iow one of the
813            XTN family. */
814        struct {
815           ARM64VecNarrowOp op;
816           UInt             dszBlg2; // 0: 16to8_x8  1: 32to16_x4  2: 64to32_x2
817           HReg             dst;     // Q reg
818           HReg             src;     // Q reg
819        } VNarrowV;
820        /* Vector shift by immediate.  For left shifts, |amt| must be
821           >= 0 and < implied lane size of |op|.  For right shifts,
822           |amt| must be > 0 and <= implied lane size of |op|.  Shifts
823           beyond these ranges are not allowed. */
824        struct {
825           ARM64VecShiftImmOp op;
826           HReg               dst;
827           HReg               src;
828           UInt               amt;
829        } VShiftImmV;
830        struct {
831           HReg dst;
832           HReg srcLo;
833           HReg srcHi;
834           UInt amtB;
835        } VExtV;
836         struct {
837            HReg   rQ;
838            UShort imm; /* Same 1-bit-per-byte encoding as IR */
839         } VImmQ;
840         struct {
841            HReg rD;
842            HReg rX;
843         } VDfromX;
844         struct {
845            HReg rQ;
846            HReg rXlo;
847         } VQfromX;
848         struct {
849            HReg rQ;
850            HReg rXhi;
851            HReg rXlo;
852         } VQfromXX;
853         struct {
854            HReg rX;
855            HReg rQ;
856            UInt laneNo; /* either 0 or 1 */
857         } VXfromQ;
858         struct {
859            HReg rX;
860            HReg rDorS;
861            Bool fromD;
862         } VXfromDorS;
863         /* MOV dst, src -- reg-reg move for vector registers */
864         struct {
865            UInt szB; // 16=mov qD,qS;  8=mov dD,dS;  4=mov sD,sS
866            HReg dst;
867            HReg src;
868         } VMov;
869         struct {
870            ARM64AMode* amCounter;
871            ARM64AMode* amFailAddr;
872         } EvCheck;
873         struct {
874            /* No fields.  The address of the counter to inc is
875               installed later, post-translation, by patching it in,
876               as it is not known at translation time. */
877         } ProfInc;
878      } ARM64in;
879   }
880   ARM64Instr;
881
882
883extern ARM64Instr* ARM64Instr_Arith   ( HReg, HReg, ARM64RIA*, Bool isAdd );
884extern ARM64Instr* ARM64Instr_Cmp     ( HReg, ARM64RIA*, Bool is64 );
885extern ARM64Instr* ARM64Instr_Logic   ( HReg, HReg, ARM64RIL*, ARM64LogicOp );
886extern ARM64Instr* ARM64Instr_Test    ( HReg, ARM64RIL* );
887extern ARM64Instr* ARM64Instr_Shift   ( HReg, HReg, ARM64RI6*, ARM64ShiftOp );
888extern ARM64Instr* ARM64Instr_Unary   ( HReg, HReg, ARM64UnaryOp );
889extern ARM64Instr* ARM64Instr_MovI    ( HReg, HReg );
890extern ARM64Instr* ARM64Instr_Imm64   ( HReg, ULong );
891extern ARM64Instr* ARM64Instr_LdSt64  ( Bool isLoad, HReg, ARM64AMode* );
892extern ARM64Instr* ARM64Instr_LdSt32  ( Bool isLoad, HReg, ARM64AMode* );
893extern ARM64Instr* ARM64Instr_LdSt16  ( Bool isLoad, HReg, ARM64AMode* );
894extern ARM64Instr* ARM64Instr_LdSt8   ( Bool isLoad, HReg, ARM64AMode* );
895extern ARM64Instr* ARM64Instr_XDirect ( Addr64 dstGA, ARM64AMode* amPC,
896                                        ARM64CondCode cond, Bool toFastEP );
897extern ARM64Instr* ARM64Instr_XIndir  ( HReg dstGA, ARM64AMode* amPC,
898                                        ARM64CondCode cond );
899extern ARM64Instr* ARM64Instr_XAssisted ( HReg dstGA, ARM64AMode* amPC,
900                                          ARM64CondCode cond, IRJumpKind jk );
901extern ARM64Instr* ARM64Instr_CSel    ( HReg dst, HReg argL, HReg argR,
902                                        ARM64CondCode cond );
903extern ARM64Instr* ARM64Instr_Call    ( ARM64CondCode, Addr64, Int nArgRegs,
904                                        RetLoc rloc );
905extern ARM64Instr* ARM64Instr_AddToSP ( Int simm );
906extern ARM64Instr* ARM64Instr_FromSP  ( HReg dst );
907extern ARM64Instr* ARM64Instr_Mul     ( HReg dst, HReg argL, HReg argR,
908                                        ARM64MulOp op );
909extern ARM64Instr* ARM64Instr_LdrEX   ( Int szB );
910extern ARM64Instr* ARM64Instr_StrEX   ( Int szB );
911extern ARM64Instr* ARM64Instr_MFence  ( void );
912extern ARM64Instr* ARM64Instr_VLdStH  ( Bool isLoad, HReg sD, HReg rN,
913                                        UInt uimm12 /* 0 .. 8190, 0 % 2 */ );
914extern ARM64Instr* ARM64Instr_VLdStS  ( Bool isLoad, HReg sD, HReg rN,
915                                        UInt uimm12 /* 0 .. 16380, 0 % 4 */ );
916extern ARM64Instr* ARM64Instr_VLdStD  ( Bool isLoad, HReg dD, HReg rN,
917                                        UInt uimm12 /* 0 .. 32760, 0 % 8 */ );
918extern ARM64Instr* ARM64Instr_VLdStQ  ( Bool isLoad, HReg rQ, HReg rN );
919extern ARM64Instr* ARM64Instr_VCvtI2F ( ARM64CvtOp how, HReg rD, HReg rS );
920extern ARM64Instr* ARM64Instr_VCvtF2I ( ARM64CvtOp how, HReg rD, HReg rS,
921                                        UChar armRM );
922extern ARM64Instr* ARM64Instr_VCvtSD  ( Bool sToD, HReg dst, HReg src );
923extern ARM64Instr* ARM64Instr_VCvtHS  ( Bool hToS, HReg dst, HReg src );
924extern ARM64Instr* ARM64Instr_VCvtHD  ( Bool hToD, HReg dst, HReg src );
925extern ARM64Instr* ARM64Instr_VUnaryD ( ARM64FpUnaryOp op, HReg dst, HReg src );
926extern ARM64Instr* ARM64Instr_VUnaryS ( ARM64FpUnaryOp op, HReg dst, HReg src );
927extern ARM64Instr* ARM64Instr_VBinD   ( ARM64FpBinOp op, HReg, HReg, HReg );
928extern ARM64Instr* ARM64Instr_VBinS   ( ARM64FpBinOp op, HReg, HReg, HReg );
929extern ARM64Instr* ARM64Instr_VCmpD   ( HReg argL, HReg argR );
930extern ARM64Instr* ARM64Instr_VCmpS   ( HReg argL, HReg argR );
931extern ARM64Instr* ARM64Instr_VFCSel  ( HReg dst, HReg argL, HReg argR,
932                                        ARM64CondCode cond, Bool isD );
933extern ARM64Instr* ARM64Instr_FPCR    ( Bool toFPCR, HReg iReg );
934extern ARM64Instr* ARM64Instr_FPSR    ( Bool toFPSR, HReg iReg );
935extern ARM64Instr* ARM64Instr_VBinV   ( ARM64VecBinOp op, HReg, HReg, HReg );
936extern ARM64Instr* ARM64Instr_VModifyV ( ARM64VecModifyOp, HReg, HReg );
937extern ARM64Instr* ARM64Instr_VUnaryV ( ARM64VecUnaryOp op, HReg, HReg );
938extern ARM64Instr* ARM64Instr_VNarrowV ( ARM64VecNarrowOp op, UInt dszBlg2,
939                                         HReg dst, HReg src );
940extern ARM64Instr* ARM64Instr_VShiftImmV ( ARM64VecShiftImmOp op,
941                                           HReg dst, HReg src, UInt amt );
942extern ARM64Instr* ARM64Instr_VExtV   ( HReg dst,
943                                        HReg srcLo, HReg srcHi, UInt amtB );
944extern ARM64Instr* ARM64Instr_VImmQ   ( HReg, UShort );
945extern ARM64Instr* ARM64Instr_VDfromX ( HReg rD, HReg rX );
946extern ARM64Instr* ARM64Instr_VQfromX ( HReg rQ, HReg rXlo );
947extern ARM64Instr* ARM64Instr_VQfromXX( HReg rQ, HReg rXhi, HReg rXlo );
948extern ARM64Instr* ARM64Instr_VXfromQ ( HReg rX, HReg rQ, UInt laneNo );
949extern ARM64Instr* ARM64Instr_VXfromDorS ( HReg rX, HReg rDorS, Bool fromD );
950extern ARM64Instr* ARM64Instr_VMov    ( UInt szB, HReg dst, HReg src );
951
952extern ARM64Instr* ARM64Instr_EvCheck ( ARM64AMode* amCounter,
953                                        ARM64AMode* amFailAddr );
954extern ARM64Instr* ARM64Instr_ProfInc ( void );
955
956extern void ppARM64Instr ( const ARM64Instr* );
957
958
959/* Some functions that insulate the register allocator from details
960   of the underlying instruction set. */
961extern void getRegUsage_ARM64Instr ( HRegUsage*, const ARM64Instr*, Bool );
962extern void mapRegs_ARM64Instr     ( HRegRemap*, ARM64Instr*, Bool );
963extern Bool isMove_ARM64Instr      ( const ARM64Instr*, HReg*, HReg* );
964extern Int  emit_ARM64Instr        ( /*MB_MOD*/Bool* is_profInc,
965                                     UChar* buf, Int nbuf, const ARM64Instr* i,
966                                     Bool mode64,
967                                     VexEndness endness_host,
968                                     const void* disp_cp_chain_me_to_slowEP,
969                                     const void* disp_cp_chain_me_to_fastEP,
970                                     const void* disp_cp_xindir,
971                                     const void* disp_cp_xassisted );
972
973extern void genSpill_ARM64  ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
974                              HReg rreg, Int offset, Bool );
975extern void genReload_ARM64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
976                              HReg rreg, Int offset, Bool );
977
978extern const RRegUniverse* getRRegUniverse_ARM64 ( void );
979
980extern HInstrArray* iselSB_ARM64 ( const IRSB*,
981                                   VexArch,
982                                   const VexArchInfo*,
983                                   const VexAbiInfo*,
984                                   Int offs_Host_EvC_Counter,
985                                   Int offs_Host_EvC_FailAddr,
986                                   Bool chainingAllowed,
987                                   Bool addProfInc,
988                                   Addr max_ga );
989
990/* How big is an event check?  This is kind of a kludge because it
991   depends on the offsets of host_EvC_FAILADDR and
992   host_EvC_COUNTER. */
993extern Int evCheckSzB_ARM64 (void);
994
995/* Perform a chaining and unchaining of an XDirect jump. */
996extern VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
997                                          void* place_to_chain,
998                                          const void* disp_cp_chain_me_EXPECTED,
999                                          const void* place_to_jump_to );
1000
1001extern VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
1002                                            void* place_to_unchain,
1003                                            const void* place_to_jump_to_EXPECTED,
1004                                            const void* disp_cp_chain_me );
1005
1006/* Patch the counter location into an existing ProfInc point. */
1007extern VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
1008                                          void*  place_to_patch,
1009                                          const ULong* location_of_counter );
1010
1011
1012#endif /* ndef __VEX_HOST_ARM64_DEFS_H */
1013
1014/*---------------------------------------------------------------*/
1015/*--- end                                   host_arm64_defs.h ---*/
1016/*---------------------------------------------------------------*/
1017