# CS_ARCH_ARM, CS_MODE_ARM, None
0xb1,0x09,0x40,0xf2 = vmul.i8 d16, d16, d17
0xb1,0x09,0x50,0xf2 = vmul.i16 d16, d16, d17
0xb1,0x09,0x60,0xf2 = vmul.i32 d16, d16, d17
0xb1,0x0d,0x40,0xf3 = vmul.f32 d16, d16, d17
0xf2,0x09,0x40,0xf2 = vmul.i8 q8, q8, q9
0xf2,0x09,0x50,0xf2 = vmul.i16 q8, q8, q9
0xf2,0x09,0x60,0xf2 = vmul.i32 q8, q8, q9
0xf2,0x0d,0x40,0xf3 = vmul.f32 q8, q8, q9
0xb1,0x09,0x40,0xf3 = vmul.p8 d16, d16, d17
0xf2,0x09,0x40,0xf3 = vmul.p8 q8, q8, q9
0x68,0x28,0xd8,0xf2 = vmul.i16 d18, d8, d0[3]
0xb1,0x09,0x40,0xf2 = vmul.i8 d16, d16, d17
0xb1,0x09,0x50,0xf2 = vmul.i16 d16, d16, d17
0xb1,0x09,0x60,0xf2 = vmul.i32 d16, d16, d17
0xb1,0x0d,0x40,0xf3 = vmul.f32 d16, d16, d17
0xf2,0x09,0x40,0xf2 = vmul.i8 q8, q8, q9
0xf2,0x09,0x50,0xf2 = vmul.i16 q8, q8, q9
0xf2,0x09,0x60,0xf2 = vmul.i32 q8, q8, q9
0xf2,0x0d,0x40,0xf3 = vmul.f32 q8, q8, q9
0xb1,0x09,0x40,0xf3 = vmul.p8 d16, d16, d17
0xf2,0x09,0x40,0xf3 = vmul.p8 q8, q8, q9
0xa1,0x0b,0x50,0xf2 = vqdmulh.s16 d16, d16, d17
0xa1,0x0b,0x60,0xf2 = vqdmulh.s32 d16, d16, d17
0xe2,0x0b,0x50,0xf2 = vqdmulh.s16 q8, q8, q9
0xe2,0x0b,0x60,0xf2 = vqdmulh.s32 q8, q8, q9
0xa1,0x0b,0x50,0xf2 = vqdmulh.s16 d16, d16, d17
0xa1,0x0b,0x60,0xf2 = vqdmulh.s32 d16, d16, d17
0xe2,0x0b,0x50,0xf2 = vqdmulh.s16 q8, q8, q9
0xe2,0x0b,0x60,0xf2 = vqdmulh.s32 q8, q8, q9
0x43,0xbc,0x92,0xf2 = vqdmulh.s16 d11, d2, d3[0]
0xa1,0x0b,0x50,0xf3 = vqrdmulh.s16 d16, d16, d17
0xa1,0x0b,0x60,0xf3 = vqrdmulh.s32 d16, d16, d17
0xe2,0x0b,0x50,0xf3 = vqrdmulh.s16 q8, q8, q9
0xe2,0x0b,0x60,0xf3 = vqrdmulh.s32 q8, q8, q9
0xa1,0x0c,0xc0,0xf2 = vmull.s8 q8, d16, d17
0xa1,0x0c,0xd0,0xf2 = vmull.s16 q8, d16, d17
0xa1,0x0c,0xe0,0xf2 = vmull.s32 q8, d16, d17
0xa1,0x0c,0xc0,0xf3 = vmull.u8 q8, d16, d17
0xa1,0x0c,0xd0,0xf3 = vmull.u16 q8, d16, d17
0xa1,0x0c,0xe0,0xf3 = vmull.u32 q8, d16, d17
0xa1,0x0e,0xc0,0xf2 = vmull.p8 q8, d16, d17
0xa1,0x0d,0xd0,0xf2 = vqdmull.s16 q8, d16, d17
0xa1,0x0d,0xe0,0xf2 = vqdmull.s32 q8, d16, d17
0x64,0x08,0x90,0xf2 = vmul.i16 d0, d0, d4[2]
0x6f,0x18,0x91,0xf2 = vmul.i16 d1, d1, d7[3]
0x49,0x28,0x92,0xf2 = vmul.i16 d2, d2, d1[1]
0x42,0x38,0xa3,0xf2 = vmul.i32 d3, d3, d2[0]
0x63,0x48,0xa4,0xf2 = vmul.i32 d4, d4, d3[1]
0x44,0x58,0xa5,0xf2 = vmul.i32 d5, d5, d4[0]
0x65,0x69,0xa6,0xf2 = vmul.f32 d6, d6, d5[1]
0x64,0x08,0x90,0xf3 = vmul.i16 q0, q0, d4[2]
0x6f,0x28,0x92,0xf3 = vmul.i16 q1, q1, d7[3]
0x49,0x48,0x94,0xf3 = vmul.i16 q2, q2, d1[1]
0x42,0x68,0xa6,0xf3 = vmul.i32 q3, q3, d2[0]
0x63,0x88,0xa8,0xf3 = vmul.i32 q4, q4, d3[1]
0x44,0xa8,0xaa,0xf3 = vmul.i32 q5, q5, d4[0]
0x65,0xc9,0xac,0xf3 = vmul.f32 q6, q6, d5[1]
0x64,0x98,0x90,0xf2 = vmul.i16 d9, d0, d4[2]
0x6f,0x88,0x91,0xf2 = vmul.i16 d8, d1, d7[3]
0x49,0x78,0x92,0xf2 = vmul.i16 d7, d2, d1[1]
0x42,0x68,0xa3,0xf2 = vmul.i32 d6, d3, d2[0]
0x63,0x58,0xa4,0xf2 = vmul.i32 d5, d4, d3[1]
0x44,0x48,0xa5,0xf2 = vmul.i32 d4, d5, d4[0]
0x65,0x39,0xa6,0xf2 = vmul.f32 d3, d6, d5[1]
0x64,0x28,0xd0,0xf3 = vmul.i16 q9, q0, d4[2]
0x6f,0x08,0xd2,0xf3 = vmul.i16 q8, q1, d7[3]
0x49,0xe8,0x94,0xf3 = vmul.i16 q7, q2, d1[1]
0x42,0xc8,0xa6,0xf3 = vmul.i32 q6, q3, d2[0]
0x63,0xa8,0xa8,0xf3 = vmul.i32 q5, q4, d3[1]
0x44,0x88,0xaa,0xf3 = vmul.i32 q4, q5, d4[0]
0x65,0x69,0xac,0xf3 = vmul.f32 q3, q6, d5[1]