# CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
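# Each line below pairs an encoded byte sequence with the AT&T-syntax
# disassembly Capstone is expected to produce under the configuration named
# above (x86, 32-bit mode, AT&T output). As a minimal sketch (not part of the
# fixture itself), a C harness along these lines can replay one vector; it
# assumes only the public Capstone API (cs_open, cs_option, cs_disasm,
# cs_free, cs_close):
#
#   #include <stdio.h>
#   #include <capstone/capstone.h>
#
#   int main(void)
#   {
#       csh handle;
#       cs_insn *insn;
#       /* first vector in this file; expected: vaddss %xmm4, %xmm6, %xmm2 */
#       const uint8_t code[] = { 0xc5, 0xca, 0x58, 0xd4 };
#
#       if (cs_open(CS_ARCH_X86, CS_MODE_32, &handle) != CS_ERR_OK)
#           return 1;
#       /* Capstone defaults to Intel syntax; this file is written in AT&T */
#       cs_option(handle, CS_OPT_SYNTAX, CS_OPT_SYNTAX_ATT);
#       size_t count = cs_disasm(handle, code, sizeof(code), 0, 1, &insn);
#       if (count == 1) {
#           printf("%s %s\n", insn[0].mnemonic, insn[0].op_str);
#           cs_free(insn, count);
#       }
#       cs_close(&handle);
#       return 0;
#   }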
0xc5,0xca,0x58,0xd4 = vaddss %xmm4, %xmm6, %xmm2
0xc5,0xca,0x59,0xd4 = vmulss %xmm4, %xmm6, %xmm2
0xc5,0xca,0x5c,0xd4 = vsubss %xmm4, %xmm6, %xmm2
0xc5,0xca,0x5e,0xd4 = vdivss %xmm4, %xmm6, %xmm2
0xc5,0xcb,0x58,0xd4 = vaddsd %xmm4, %xmm6, %xmm2
0xc5,0xcb,0x59,0xd4 = vmulsd %xmm4, %xmm6, %xmm2
0xc5,0xcb,0x5c,0xd4 = vsubsd %xmm4, %xmm6, %xmm2
0xc5,0xcb,0x5e,0xd4 = vdivsd %xmm4, %xmm6, %xmm2
0xc5,0xea,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xc8,0x58,0xd4 = vaddps %xmm4, %xmm6, %xmm2
0xc5,0xc8,0x5c,0xd4 = vsubps %xmm4, %xmm6, %xmm2
0xc5,0xc8,0x59,0xd4 = vmulps %xmm4, %xmm6, %xmm2
0xc5,0xc8,0x5e,0xd4 = vdivps %xmm4, %xmm6, %xmm2
0xc5,0xc9,0x58,0xd4 = vaddpd %xmm4, %xmm6, %xmm2
0xc5,0xc9,0x5c,0xd4 = vsubpd %xmm4, %xmm6, %xmm2
0xc5,0xc9,0x59,0xd4 = vmulpd %xmm4, %xmm6, %xmm2
0xc5,0xc9,0x5e,0xd4 = vdivpd %xmm4, %xmm6, %xmm2
0xc5,0xe8,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xda,0x5f,0xf2 = vmaxss %xmm2, %xmm4, %xmm6
0xc5,0xdb,0x5f,0xf2 = vmaxsd %xmm2, %xmm4, %xmm6
0xc5,0xda,0x5d,0xf2 = vminss %xmm2, %xmm4, %xmm6
0xc5,0xdb,0x5d,0xf2 = vminsd %xmm2, %xmm4, %xmm6
0xc5,0xea,0x5f,0x6c,0xcb,0xfc = vmaxss -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5f,0x6c,0xcb,0xfc = vmaxsd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x5d,0x6c,0xcb,0xfc = vminss -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5d,0x6c,0xcb,0xfc = vminsd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xd8,0x5f,0xf2 = vmaxps %xmm2, %xmm4, %xmm6
0xc5,0xd9,0x5f,0xf2 = vmaxpd %xmm2, %xmm4, %xmm6
0xc5,0xd8,0x5d,0xf2 = vminps %xmm2, %xmm4, %xmm6
0xc5,0xd9,0x5d,0xf2 = vminpd %xmm2, %xmm4, %xmm6
0xc5,0xe8,0x5f,0x6c,0xcb,0xfc = vmaxps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5f,0x6c,0xcb,0xfc = vmaxpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x5d,0x6c,0xcb,0xfc = vminps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5d,0x6c,0xcb,0xfc = vminpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xd8,0x54,0xf2 = vandps %xmm2, %xmm4, %xmm6
0xc5,0xd9,0x54,0xf2 = vandpd %xmm2, %xmm4, %xmm6
0xc5,0xe8,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xd8,0x56,0xf2 = vorps %xmm2, %xmm4, %xmm6
0xc5,0xd9,0x56,0xf2 = vorpd %xmm2, %xmm4, %xmm6
0xc5,0xe8,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xd8,0x57,0xf2 = vxorps %xmm2, %xmm4, %xmm6
0xc5,0xd9,0x57,0xf2 = vxorpd %xmm2, %xmm4, %xmm6
0xc5,0xe8,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xd8,0x55,0xf2 = vandnps %xmm2, %xmm4, %xmm6
0xc5,0xd9,0x55,0xf2 = vandnpd %xmm2, %xmm4, %xmm6
0xc5,0xe8,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xfa,0x10,0x6c,0xcb,0xfc = vmovss -4(%ebx,%ecx,8), %xmm5
0xc5,0xea,0x10,0xec = vmovss %xmm4, %xmm2, %xmm5
0xc5,0xfb,0x10,0x6c,0xcb,0xfc = vmovsd -4(%ebx,%ecx,8), %xmm5
0xc5,0xeb,0x10,0xec = vmovsd %xmm4, %xmm2, %xmm5
0xc5,0xe8,0x15,0xe1 = vunpckhps %xmm1, %xmm2, %xmm4
0xc5,0xe9,0x15,0xe1 = vunpckhpd %xmm1, %xmm2, %xmm4
0xc5,0xe8,0x14,0xe1 = vunpcklps %xmm1, %xmm2, %xmm4
0xc5,0xe9,0x14,0xe1 = vunpcklpd %xmm1, %xmm2, %xmm4
0xc5,0xe8,0x15,0x6c,0xcb,0xfc = vunpckhps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x15,0x6c,0xcb,0xfc = vunpckhpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x14,0x6c,0xcb,0xfc = vunpcklps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x14,0x6c,0xcb,0xfc = vunpcklpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xc8,0xc2,0xc8,0x00 = vcmpps $0, %xmm0, %xmm6, %xmm1
0xc5,0xc8,0xc2,0x08,0x00 = vcmpps $0, (%eax), %xmm6, %xmm1
0xc5,0xc8,0xc2,0xc8,0x07 = vcmpps $7, %xmm0, %xmm6, %xmm1
0xc5,0xc9,0xc2,0xc8,0x00 = vcmppd $0, %xmm0, %xmm6, %xmm1
0xc5,0xc9,0xc2,0x08,0x00 = vcmppd $0, (%eax), %xmm6, %xmm1
0xc5,0xc9,0xc2,0xc8,0x07 = vcmppd $7, %xmm0, %xmm6, %xmm1
0xc5,0xe8,0xc6,0xd9,0x08 = vshufps $8, %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc6,0x5c,0xcb,0xfc,0x08 = vshufps $8, -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc6,0xd9,0x08 = vshufpd $8, %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc6,0x5c,0xcb,0xfc,0x08 = vshufpd $8, -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x00 = vcmpeqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x02 = vcmpleps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x01 = vcmpltps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x04 = vcmpneqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x06 = vcmpnleps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x05 = vcmpnltps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x07 = vcmpordps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x03 = vcmpunordps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x00 = vcmpeqpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x02 = vcmplepd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x01 = vcmpltpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x04 = vcmpneqpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x06 = vcmpnlepd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x05 = vcmpnltpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x07 = vcmpordpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x03 = vcmpunordpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
0xc5,0xfc,0x50,0xc2 = vmovmskps %ymm2, %eax
0xc5,0xfd,0x50,0xc2 = vmovmskpd %ymm2, %eax
0xc5,0xea,0xc2,0xd9,0x00 = vcmpeqss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x02 = vcmpless %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x01 = vcmpltss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x04 = vcmpneqss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x06 = vcmpnless %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x05 = vcmpnltss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x07 = vcmpordss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x03 = vcmpunordss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpless -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnless -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordss -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x00 = vcmpeqsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x02 = vcmplesd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x01 = vcmpltsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x04 = vcmpneqsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x06 = vcmpnlesd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x05 = vcmpnltsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x07 = vcmpordsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x03 = vcmpunordsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplesd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlesd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordsd -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xf8,0x2e,0xd1 = vucomiss %xmm1, %xmm2
0xc5,0xf8,0x2e,0x10 = vucomiss (%eax), %xmm2
0xc5,0xf8,0x2f,0xd1 = vcomiss %xmm1, %xmm2
0xc5,0xf8,0x2f,0x10 = vcomiss (%eax), %xmm2
0xc5,0xf9,0x2e,0xd1 = vucomisd %xmm1, %xmm2
0xc5,0xf9,0x2e,0x10 = vucomisd (%eax), %xmm2
0xc5,0xf9,0x2f,0xd1 = vcomisd %xmm1, %xmm2
0xc5,0xf9,0x2f,0x10 = vcomisd (%eax), %xmm2
0xc5,0xfa,0x2c,0xc1 = vcvttss2si %xmm1, %eax
0xc5,0xfa,0x2c,0x01 = vcvttss2si (%ecx), %eax
0xc5,0xf2,0x2a,0x10 = vcvtsi2ss (%eax), %xmm1, %xmm2
0xc5,0xf2,0x2a,0x10 = vcvtsi2ss (%eax), %xmm1, %xmm2
0xc5,0xf2,0x2a,0x10 = vcvtsi2ssl (%eax), %xmm1, %xmm2
0xc5,0xf2,0x2a,0x10 = vcvtsi2ssl (%eax), %xmm1, %xmm2
0xc5,0xfb,0x2c,0xc1 = vcvttsd2si %xmm1, %eax
0xc5,0xfb,0x2c,0x01 = vcvttsd2si (%ecx), %eax
0xc5,0xf3,0x2a,0x10 = vcvtsi2sd (%eax), %xmm1, %xmm2
0xc5,0xf3,0x2a,0x10 = vcvtsi2sd (%eax), %xmm1, %xmm2
0xc5,0xf3,0x2a,0x10 = vcvtsi2sdl (%eax), %xmm1, %xmm2
0xc5,0xf3,0x2a,0x10 = vcvtsi2sdl (%eax), %xmm1, %xmm2
0xc5,0xf8,0x28,0x10 = vmovaps (%eax), %xmm2
0xc5,0xf8,0x28,0xd1 = vmovaps %xmm1, %xmm2
0xc5,0xf8,0x29,0x08 = vmovaps %xmm1, (%eax)
0xc5,0xf9,0x28,0x10 = vmovapd (%eax), %xmm2
0xc5,0xf9,0x28,0xd1 = vmovapd %xmm1, %xmm2
0xc5,0xf9,0x29,0x08 = vmovapd %xmm1, (%eax)
0xc5,0xf8,0x10,0x10 = vmovups (%eax), %xmm2
0xc5,0xf8,0x10,0xd1 = vmovups %xmm1, %xmm2
0xc5,0xf8,0x11,0x08 = vmovups %xmm1, (%eax)
0xc5,0xf9,0x10,0x10 = vmovupd (%eax), %xmm2
0xc5,0xf9,0x10,0xd1 = vmovupd %xmm1, %xmm2
0xc5,0xf9,0x11,0x08 = vmovupd %xmm1, (%eax)
0xc5,0xf8,0x13,0x08 = vmovlps %xmm1, (%eax)
0xc5,0xe8,0x12,0x18 = vmovlps (%eax), %xmm2, %xmm3
0xc5,0xf9,0x13,0x08 = vmovlpd %xmm1, (%eax)
0xc5,0xe9,0x12,0x18 = vmovlpd (%eax), %xmm2, %xmm3
0xc5,0xf8,0x17,0x08 = vmovhps %xmm1, (%eax)
0xc5,0xe8,0x16,0x18 = vmovhps (%eax), %xmm2, %xmm3
0xc5,0xf9,0x17,0x08 = vmovhpd %xmm1, (%eax)
0xc5,0xe9,0x16,0x18 = vmovhpd (%eax), %xmm2, %xmm3
0xc5,0xe8,0x16,0xd9 = vmovlhps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0x12,0xd9 = vmovhlps %xmm1, %xmm2, %xmm3
0xc5,0xfa,0x2d,0xc1 = vcvtss2si %xmm1, %eax
0xc5,0xfa,0x2d,0x18 = vcvtss2si (%eax), %ebx
0xc5,0xfa,0x2d,0xc1 = vcvtss2sil %xmm1, %eax
0xc5,0xfa,0x2d,0x18 = vcvtss2sil (%eax), %ebx
0xc5,0xf8,0x5b,0xf5 = vcvtdq2ps %xmm5, %xmm6
0xc5,0xf8,0x5b,0x30 = vcvtdq2ps (%eax), %xmm6
0xc5,0xdb,0x5a,0xf2 = vcvtsd2ss %xmm2, %xmm4, %xmm6
0xc5,0xdb,0x5a,0x30 = vcvtsd2ss (%eax), %xmm4, %xmm6
0xc5,0xf9,0x5b,0xda = vcvtps2dq %xmm2, %xmm3
0xc5,0xf9,0x5b,0x18 = vcvtps2dq (%eax), %xmm3
0xc5,0xda,0x5a,0xf2 = vcvtss2sd %xmm2, %xmm4, %xmm6
0xc5,0xda,0x5a,0x30 = vcvtss2sd (%eax), %xmm4, %xmm6
0xc5,0xf8,0x5b,0xf4 = vcvtdq2ps %xmm4, %xmm6
0xc5,0xf8,0x5b,0x21 = vcvtdq2ps (%ecx), %xmm4
0xc5,0xfa,0x5b,0xda = vcvttps2dq %xmm2, %xmm3
0xc5,0xfa,0x5b,0x18 = vcvttps2dq (%eax), %xmm3
0xc5,0xf8,0x5a,0xda = vcvtps2pd %xmm2, %xmm3
0xc5,0xf8,0x5a,0x18 = vcvtps2pd (%eax), %xmm3
0xc5,0xf9,0x5a,0xda = vcvtpd2ps %xmm2, %xmm3
0xc5,0xf9,0x51,0xd1 = vsqrtpd %xmm1, %xmm2
0xc5,0xf9,0x51,0x10 = vsqrtpd (%eax), %xmm2
0xc5,0xf8,0x51,0xd1 = vsqrtps %xmm1, %xmm2
0xc5,0xf8,0x51,0x10 = vsqrtps (%eax), %xmm2
0xc5,0xeb,0x51,0xd9 = vsqrtsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0x51,0x18 = vsqrtsd (%eax), %xmm2, %xmm3
0xc5,0xea,0x51,0xd9 = vsqrtss %xmm1, %xmm2, %xmm3
0xc5,0xea,0x51,0x18 = vsqrtss (%eax), %xmm2, %xmm3
0xc5,0xf8,0x52,0xd1 = vrsqrtps %xmm1, %xmm2
0xc5,0xf8,0x52,0x10 = vrsqrtps (%eax), %xmm2
0xc5,0xea,0x52,0xd9 = vrsqrtss %xmm1, %xmm2, %xmm3
0xc5,0xea,0x52,0x18 = vrsqrtss (%eax), %xmm2, %xmm3
0xc5,0xf8,0x53,0xd1 = vrcpps %xmm1, %xmm2
0xc5,0xf8,0x53,0x10 = vrcpps (%eax), %xmm2
0xc5,0xea,0x53,0xd9 = vrcpss %xmm1, %xmm2, %xmm3
0xc5,0xea,0x53,0x18 = vrcpss (%eax), %xmm2, %xmm3
0xc5,0xf9,0xe7,0x08 = vmovntdq %xmm1, (%eax)
0xc5,0xf9,0x2b,0x08 = vmovntpd %xmm1, (%eax)
0xc5,0xf8,0x2b,0x08 = vmovntps %xmm1, (%eax)
0xc5,0xf8,0xae,0x10 = vldmxcsr (%eax)
0xc5,0xf8,0xae,0x18 = vstmxcsr (%eax)
0xc5,0xf8,0xae,0x15,0xef,0xbe,0xad,0xde = vldmxcsr 0xdeadbeef
0xc5,0xf8,0xae,0x1d,0xef,0xbe,0xad,0xde = vstmxcsr 0xdeadbeef
0xc5,0xe9,0xf8,0xd9 = vpsubb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xf8,0x18 = vpsubb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xf9,0xd9 = vpsubw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xf9,0x18 = vpsubw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xfa,0xd9 = vpsubd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xfa,0x18 = vpsubd (%eax), %xmm2, %xmm3
0xc5,0xe9,0xfb,0xd9 = vpsubq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xfb,0x18 = vpsubq (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe8,0xd9 = vpsubsb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe8,0x18 = vpsubsb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe9,0xd9 = vpsubsw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe9,0x18 = vpsubsw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd8,0xd9 = vpsubusb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xd8,0x18 = vpsubusb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd9,0xd9 = vpsubusw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xd9,0x18 = vpsubusw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xfc,0xd9 = vpaddb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xfc,0x18 = vpaddb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xfd,0xd9 = vpaddw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xfd,0x18 = vpaddw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xfe,0xd9 = vpaddd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xfe,0x18 = vpaddd (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd4,0xd9 = vpaddq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xd4,0x18 = vpaddq (%eax), %xmm2, %xmm3
0xc5,0xe9,0xec,0xd9 = vpaddsb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xec,0x18 = vpaddsb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xed,0xd9 = vpaddsw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xed,0x18 = vpaddsw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xdc,0xd9 = vpaddusb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xdc,0x18 = vpaddusb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xdd,0xd9 = vpaddusw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xdd,0x18 = vpaddusw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe4,0xd9 = vpmulhuw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe4,0x18 = vpmulhuw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe5,0xd9 = vpmulhw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe5,0x18 = vpmulhw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd5,0xd9 = vpmullw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xd5,0x18 = vpmullw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xf4,0xd9 = vpmuludq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xf4,0x18 = vpmuludq (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe0,0xd9 = vpavgb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe0,0x18 = vpavgb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe3,0xd9 = vpavgw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe3,0x18 = vpavgw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xea,0xd9 = vpminsw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xea,0x18 = vpminsw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xda,0xd9 = vpminub %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xda,0x18 = vpminub (%eax), %xmm2, %xmm3
0xc5,0xe9,0xee,0xd9 = vpmaxsw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xee,0x18 = vpmaxsw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xde,0xd9 = vpmaxub %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xde,0x18 = vpmaxub (%eax), %xmm2, %xmm3
0xc5,0xe9,0xf6,0xd9 = vpsadbw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xf6,0x18 = vpsadbw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xf1,0xd9 = vpsllw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xf1,0x18 = vpsllw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xf2,0xd9 = vpslld %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xf2,0x18 = vpslld (%eax), %xmm2, %xmm3
0xc5,0xe9,0xf3,0xd9 = vpsllq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xf3,0x18 = vpsllq (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe1,0xd9 = vpsraw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe1,0x18 = vpsraw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe2,0xd9 = vpsrad %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xe2,0x18 = vpsrad (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd1,0xd9 = vpsrlw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xd1,0x18 = vpsrlw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd2,0xd9 = vpsrld %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xd2,0x18 = vpsrld (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd3,0xd9 = vpsrlq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xd3,0x18 = vpsrlq (%eax), %xmm2, %xmm3
0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xfa,0x0a = vpslldq $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xf2,0x0a = vpsllq $10, %xmm2, %xmm3
0xc5,0xe1,0x71,0xf2,0x0a = vpsllw $10, %xmm2, %xmm3
0xc5,0xe1,0x72,0xe2,0x0a = vpsrad $10, %xmm2, %xmm3
0xc5,0xe1,0x71,0xe2,0x0a = vpsraw $10, %xmm2, %xmm3
0xc5,0xe1,0x72,0xd2,0x0a = vpsrld $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xda,0x0a = vpsrldq $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xd2,0x0a = vpsrlq $10, %xmm2, %xmm3
0xc5,0xe1,0x71,0xd2,0x0a = vpsrlw $10, %xmm2, %xmm3
0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
0xc5,0xe9,0xdb,0xd9 = vpand %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xdb,0x18 = vpand (%eax), %xmm2, %xmm3
0xc5,0xe9,0xeb,0xd9 = vpor %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xeb,0x18 = vpor (%eax), %xmm2, %xmm3
0xc5,0xe9,0xef,0xd9 = vpxor %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xef,0x18 = vpxor (%eax), %xmm2, %xmm3
0xc5,0xe9,0xdf,0xd9 = vpandn %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xdf,0x18 = vpandn (%eax), %xmm2, %xmm3
0xc5,0xe9,0x74,0xd9 = vpcmpeqb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x74,0x18 = vpcmpeqb (%eax), %xmm2, %xmm3
0xc5,0xe9,0x75,0xd9 = vpcmpeqw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x75,0x18 = vpcmpeqw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x76,0xd9 = vpcmpeqd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x76,0x18 = vpcmpeqd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x64,0xd9 = vpcmpgtb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x64,0x18 = vpcmpgtb (%eax), %xmm2, %xmm3
0xc5,0xe9,0x65,0xd9 = vpcmpgtw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x65,0x18 = vpcmpgtw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x66,0xd9 = vpcmpgtd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x66,0x18 = vpcmpgtd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x63,0xd9 = vpacksswb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x63,0x18 = vpacksswb (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6b,0xd9 = vpackssdw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x6b,0x18 = vpackssdw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x67,0xd9 = vpackuswb %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x67,0x18 = vpackuswb (%eax), %xmm2, %xmm3
0xc5,0xf9,0x70,0xda,0x04 = vpshufd $4, %xmm2, %xmm3
0xc5,0xf9,0x70,0x18,0x04 = vpshufd $4, (%eax), %xmm3
0xc5,0xfa,0x70,0xda,0x04 = vpshufhw $4, %xmm2, %xmm3
0xc5,0xfa,0x70,0x18,0x04 = vpshufhw $4, (%eax), %xmm3
0xc5,0xfb,0x70,0xda,0x04 = vpshuflw $4, %xmm2, %xmm3
0xc5,0xfb,0x70,0x18,0x04 = vpshuflw $4, (%eax), %xmm3
0xc5,0xe9,0x60,0xd9 = vpunpcklbw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x60,0x18 = vpunpcklbw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x61,0xd9 = vpunpcklwd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x61,0x18 = vpunpcklwd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x62,0xd9 = vpunpckldq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x62,0x18 = vpunpckldq (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6c,0xd9 = vpunpcklqdq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x6c,0x18 = vpunpcklqdq (%eax), %xmm2, %xmm3
0xc5,0xe9,0x68,0xd9 = vpunpckhbw %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x68,0x18 = vpunpckhbw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x69,0xd9 = vpunpckhwd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x69,0x18 = vpunpckhwd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6a,0xd9 = vpunpckhdq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x6a,0x18 = vpunpckhdq (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6d,0xd9 = vpunpckhqdq %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x6d,0x18 = vpunpckhqdq (%eax), %xmm2, %xmm3
0xc5,0xe9,0xc4,0xd8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm3
0xc5,0xe9,0xc4,0x18,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm3
0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
0xc5,0xf9,0xd7,0xc1 = vpmovmskb %xmm1, %eax
0xc5,0xf9,0xf7,0xd1 = vmaskmovdqu %xmm1, %xmm2
0xc5,0xf9,0x7e,0xc8 = vmovd %xmm1, %eax
0xc5,0xf9,0x7e,0x08 = vmovd %xmm1, (%eax)
0xc5,0xf9,0x6e,0xc8 = vmovd %eax, %xmm1
0xc5,0xf9,0x6e,0x08 = vmovd (%eax), %xmm1
0xc5,0xf9,0xd6,0x08 = vmovq %xmm1, (%eax)
0xc5,0xfa,0x7e,0xd1 = vmovq %xmm1, %xmm2
0xc5,0xfa,0x7e,0x08 = vmovq (%eax), %xmm1
0xc5,0xfb,0xe6,0xd1 = vcvtpd2dq %xmm1, %xmm2
0xc5,0xfa,0xe6,0xd1 = vcvtdq2pd %xmm1, %xmm2
0xc5,0xfa,0xe6,0x10 = vcvtdq2pd (%eax), %xmm2
0xc5,0xfa,0x16,0xd1 = vmovshdup %xmm1, %xmm2
0xc5,0xfa,0x16,0x10 = vmovshdup (%eax), %xmm2
0xc5,0xfa,0x12,0xd1 = vmovsldup %xmm1, %xmm2
0xc5,0xfa,0x12,0x10 = vmovsldup (%eax), %xmm2
0xc5,0xfb,0x12,0xd1 = vmovddup %xmm1, %xmm2
0xc5,0xfb,0x12,0x10 = vmovddup (%eax), %xmm2
0xc5,0xeb,0xd0,0xd9 = vaddsubps %xmm1, %xmm2, %xmm3
0xc5,0xf3,0xd0,0x10 = vaddsubps (%eax), %xmm1, %xmm2
0xc5,0xe9,0xd0,0xd9 = vaddsubpd %xmm1, %xmm2, %xmm3
0xc5,0xf1,0xd0,0x10 = vaddsubpd (%eax), %xmm1, %xmm2
0xc5,0xeb,0x7c,0xd9 = vhaddps %xmm1, %xmm2, %xmm3
0xc5,0xeb,0x7c,0x18 = vhaddps (%eax), %xmm2, %xmm3
0xc5,0xe9,0x7c,0xd9 = vhaddpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x7c,0x18 = vhaddpd (%eax), %xmm2, %xmm3
0xc5,0xeb,0x7d,0xd9 = vhsubps %xmm1, %xmm2, %xmm3
0xc5,0xeb,0x7d,0x18 = vhsubps (%eax), %xmm2, %xmm3
0xc5,0xe9,0x7d,0xd9 = vhsubpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0x7d,0x18 = vhsubpd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x79,0x1c,0xd1 = vpabsb %xmm1, %xmm2
0xc4,0xe2,0x79,0x1c,0x10 = vpabsb (%eax), %xmm2
0xc4,0xe2,0x79,0x1d,0xd1 = vpabsw %xmm1, %xmm2
0xc4,0xe2,0x79,0x1d,0x10 = vpabsw (%eax), %xmm2
0xc4,0xe2,0x79,0x1e,0xd1 = vpabsd %xmm1, %xmm2
0xc4,0xe2,0x79,0x1e,0x10 = vpabsd (%eax), %xmm2
0xc4,0xe2,0x69,0x01,0xd9 = vphaddw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x01,0x18 = vphaddw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x02,0xd9 = vphaddd %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x02,0x18 = vphaddd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x03,0xd9 = vphaddsw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x03,0x18 = vphaddsw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x05,0xd9 = vphsubw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x05,0x18 = vphsubw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x06,0xd9 = vphsubd %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x06,0x18 = vphsubd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x07,0xd9 = vphsubsw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x07,0x18 = vphsubsw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x04,0xd9 = vpmaddubsw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x04,0x18 = vpmaddubsw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x00,0xd9 = vpshufb %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x00,0x18 = vpshufb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x08,0xd9 = vpsignb %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x08,0x18 = vpsignb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x09,0xd9 = vpsignw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x09,0x18 = vpsignw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x0a,0xd9 = vpsignd %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x0a,0x18 = vpsignd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x0b,0xd9 = vpmulhrsw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x0b,0x18 = vpmulhrsw (%eax), %xmm2, %xmm3
0xc4,0xe3,0x69,0x0f,0xd9,0x07 = vpalignr $7, %xmm1, %xmm2, %xmm3
0xc4,0xe3,0x69,0x0f,0x18,0x07 = vpalignr $7, (%eax), %xmm2, %xmm3
0xc4,0xe3,0x69,0x0b,0xd9,0x07 = vroundsd $7, %xmm1, %xmm2, %xmm3
0xc4,0xe3,0x69,0x0b,0x18,0x07 = vroundsd $7, (%eax), %xmm2, %xmm3
0xc4,0xe3,0x69,0x0a,0xd9,0x07 = vroundss $7, %xmm1, %xmm2, %xmm3
0xc4,0xe3,0x69,0x0a,0x18,0x07 = vroundss $7, (%eax), %xmm2, %xmm3
0xc4,0xe3,0x79,0x09,0xda,0x07 = vroundpd $7, %xmm2, %xmm3
0xc4,0xe3,0x79,0x09,0x18,0x07 = vroundpd $7, (%eax), %xmm3
0xc4,0xe3,0x79,0x08,0xda,0x07 = vroundps $7, %xmm2, %xmm3
0xc4,0xe3,0x79,0x08,0x18,0x07 = vroundps $7, (%eax), %xmm3
0xc4,0xe2,0x79,0x41,0xda = vphminposuw %xmm2, %xmm3
0xc4,0xe2,0x79,0x41,0x10 = vphminposuw (%eax), %xmm2
0xc4,0xe2,0x61,0x2b,0xca = vpackusdw %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x2b,0x18 = vpackusdw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x29,0xca = vpcmpeqq %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x29,0x18 = vpcmpeqq (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x38,0xca = vpminsb %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x38,0x18 = vpminsb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x39,0xca = vpminsd %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x39,0x18 = vpminsd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x3b,0xca = vpminud %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x3b,0x18 = vpminud (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x3a,0xca = vpminuw %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x3a,0x18 = vpminuw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x3c,0xca = vpmaxsb %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x3c,0x18 = vpmaxsb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x3d,0xca = vpmaxsd %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x3d,0x18 = vpmaxsd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x3f,0xca = vpmaxud %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x3f,0x18 = vpmaxud (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x3e,0xca = vpmaxuw %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x3e,0x18 = vpmaxuw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x61,0x28,0xca = vpmuldq %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x28,0x18 = vpmuldq (%eax), %xmm2, %xmm3
0xc4,0xe2,0x51,0x40,0xca = vpmulld %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0x40,0x18 = vpmulld (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x0c,0xca,0x03 = vblendps $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x0c,0x08,0x03 = vblendps $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x0d,0xca,0x03 = vblendpd $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x0d,0x08,0x03 = vblendpd $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x0e,0xca,0x03 = vpblendw $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x0e,0x08,0x03 = vpblendw $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x42,0xca,0x03 = vmpsadbw $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x42,0x08,0x03 = vmpsadbw $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x40,0xca,0x03 = vdpps $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x40,0x08,0x03 = vdpps $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x41,0xca,0x03 = vdppd $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x41,0x08,0x03 = vdppd $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x71,0x4b,0xdd,0x20 = vblendvpd %xmm2, %xmm5, %xmm1, %xmm3
0xc4,0xe3,0x71,0x4b,0x18,0x20 = vblendvpd %xmm2, (%eax), %xmm1, %xmm3
0xc4,0xe3,0x71,0x4a,0xdd,0x20 = vblendvps %xmm2, %xmm5, %xmm1, %xmm3
0xc4,0xe3,0x71,0x4a,0x18,0x20 = vblendvps %xmm2, (%eax), %xmm1, %xmm3
0xc4,0xe3,0x71,0x4c,0xdd,0x20 = vpblendvb %xmm2, %xmm5, %xmm1, %xmm3
0xc4,0xe3,0x71,0x4c,0x18,0x20 = vpblendvb %xmm2, (%eax), %xmm1, %xmm3
0xc4,0xe2,0x79,0x20,0xea = vpmovsxbw %xmm2, %xmm5
0xc4,0xe2,0x79,0x20,0x10 = vpmovsxbw (%eax), %xmm2
0xc4,0xe2,0x79,0x23,0xea = vpmovsxwd %xmm2, %xmm5
0xc4,0xe2,0x79,0x23,0x10 = vpmovsxwd (%eax), %xmm2
0xc4,0xe2,0x79,0x25,0xea = vpmovsxdq %xmm2, %xmm5
0xc4,0xe2,0x79,0x25,0x10 = vpmovsxdq (%eax), %xmm2
0xc4,0xe2,0x79,0x30,0xea = vpmovzxbw %xmm2, %xmm5
0xc4,0xe2,0x79,0x30,0x10 = vpmovzxbw (%eax), %xmm2
0xc4,0xe2,0x79,0x33,0xea = vpmovzxwd %xmm2, %xmm5
0xc4,0xe2,0x79,0x33,0x10 = vpmovzxwd (%eax), %xmm2
0xc4,0xe2,0x79,0x35,0xea = vpmovzxdq %xmm2, %xmm5
0xc4,0xe2,0x79,0x35,0x10 = vpmovzxdq (%eax), %xmm2
0xc4,0xe2,0x79,0x22,0xea = vpmovsxbq %xmm2, %xmm5
0xc4,0xe2,0x79,0x22,0x10 = vpmovsxbq (%eax), %xmm2
0xc4,0xe2,0x79,0x32,0xea = vpmovzxbq %xmm2, %xmm5
0xc4,0xe2,0x79,0x32,0x10 = vpmovzxbq (%eax), %xmm2
0xc4,0xe2,0x79,0x21,0xea = vpmovsxbd %xmm2, %xmm5
0xc4,0xe2,0x79,0x21,0x10 = vpmovsxbd (%eax), %xmm2
0xc4,0xe2,0x79,0x24,0xea = vpmovsxwq %xmm2, %xmm5
0xc4,0xe2,0x79,0x24,0x10 = vpmovsxwq (%eax), %xmm2
0xc4,0xe2,0x79,0x31,0xea = vpmovzxbd %xmm2, %xmm5
0xc4,0xe2,0x79,0x31,0x10 = vpmovzxbd (%eax), %xmm2
0xc4,0xe2,0x79,0x34,0xea = vpmovzxwq %xmm2, %xmm5
0xc4,0xe2,0x79,0x34,0x10 = vpmovzxwq (%eax), %xmm2
0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
0xc4,0xe3,0x79,0x15,0x10,0x07 = vpextrw $7, %xmm2, (%eax)
0xc4,0xe3,0x79,0x16,0xd0,0x07 = vpextrd $7, %xmm2, %eax
0xc4,0xe3,0x79,0x16,0x10,0x07 = vpextrd $7, %xmm2, (%eax)
0xc4,0xe3,0x79,0x14,0xd0,0x07 = vpextrb $7, %xmm2, %eax
0xc4,0xe3,0x79,0x14,0x10,0x07 = vpextrb $7, %xmm2, (%eax)
0xc4,0xe3,0x79,0x17,0x10,0x07 = vextractps $7, %xmm2, (%eax)
0xc4,0xe3,0x79,0x17,0xd0,0x07 = vextractps $7, %xmm2, %eax
0xc5,0xe9,0xc4,0xe8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm5
0xc5,0xe9,0xc4,0x28,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm5
0xc4,0xe3,0x69,0x20,0xe8,0x07 = vpinsrb $7, %eax, %xmm2, %xmm5
0xc4,0xe3,0x69,0x20,0x28,0x07 = vpinsrb $7, (%eax), %xmm2, %xmm5
0xc4,0xe3,0x69,0x22,0xe8,0x07 = vpinsrd $7, %eax, %xmm2, %xmm5
0xc4,0xe3,0x69,0x22,0x28,0x07 = vpinsrd $7, (%eax), %xmm2, %xmm5
0xc4,0xe3,0x51,0x21,0xca,0x07 = vinsertps $7, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x21,0x08,0x07 = vinsertps $7, (%eax), %xmm5, %xmm1
0xc4,0xe2,0x79,0x17,0xea = vptest %xmm2, %xmm5
0xc4,0xe2,0x79,0x17,0x10 = vptest (%eax), %xmm2
0xc4,0xe2,0x79,0x2a,0x10 = vmovntdqa (%eax), %xmm2
0xc4,0xe2,0x51,0x37,0xca = vpcmpgtq %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0x37,0x18 = vpcmpgtq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x79,0x62,0xea,0x07 = vpcmpistrm $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x62,0x28,0x07 = vpcmpistrm $7, (%eax), %xmm5
0xc4,0xe3,0x79,0x60,0xea,0x07 = vpcmpestrm $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x60,0x28,0x07 = vpcmpestrm $7, (%eax), %xmm5
0xc4,0xe3,0x79,0x63,0xea,0x07 = vpcmpistri $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x63,0x28,0x07 = vpcmpistri $7, (%eax), %xmm5
0xc4,0xe3,0x79,0x61,0xea,0x07 = vpcmpestri $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x61,0x28,0x07 = vpcmpestri $7, (%eax), %xmm5
0xc4,0xe2,0x79,0xdb,0xea = vaesimc %xmm2, %xmm5
0xc4,0xe2,0x79,0xdb,0x10 = vaesimc (%eax), %xmm2
0xc4,0xe2,0x51,0xdc,0xca = vaesenc %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0xdc,0x18 = vaesenc (%eax), %xmm5, %xmm3
0xc4,0xe2,0x51,0xdd,0xca = vaesenclast %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0xdd,0x18 = vaesenclast (%eax), %xmm5, %xmm3
0xc4,0xe2,0x51,0xde,0xca = vaesdec %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0xde,0x18 = vaesdec (%eax), %xmm5, %xmm3
0xc4,0xe2,0x51,0xdf,0xca = vaesdeclast %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0xdf,0x18 = vaesdeclast (%eax), %xmm5, %xmm3
0xc4,0xe3,0x79,0xdf,0xea,0x07 = vaeskeygenassist $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0xdf,0x28,0x07 = vaeskeygenassist $7, (%eax), %xmm5
0xc5,0xe8,0xc2,0xd9,0x08 = vcmpeq_uqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x09 = vcmpngeps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0a = vcmpngtps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0b = vcmpfalseps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0c = vcmpneq_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0d = vcmpgeps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0e = vcmpgtps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0f = vcmptrueps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x10 = vcmpeq_osps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x11 = vcmplt_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x12 = vcmple_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x13 = vcmpunord_sps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x14 = vcmpneq_usps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x15 = vcmpnlt_uqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x16 = vcmpnle_uqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x17 = vcmpord_sps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x18 = vcmpeq_usps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x19 = vcmpnge_uqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1a = vcmpngt_uqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1b = vcmpfalse_osps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1c = vcmpneq_osps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1d = vcmpge_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1e = vcmpgt_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1f = vcmptrue_usps %xmm1, %xmm2, %xmm3
0xc5,0xfc,0x28,0x10 = vmovaps (%eax), %ymm2
0xc5,0xfc,0x28,0xd1 = vmovaps %ymm1, %ymm2
0xc5,0xfc,0x29,0x08 = vmovaps %ymm1, (%eax)
0xc5,0xfd,0x28,0x10 = vmovapd (%eax), %ymm2
0xc5,0xfd,0x28,0xd1 = vmovapd %ymm1, %ymm2
0xc5,0xfd,0x29,0x08 = vmovapd %ymm1, (%eax)
0xc5,0xfc,0x10,0x10 = vmovups (%eax), %ymm2
0xc5,0xfc,0x10,0xd1 = vmovups %ymm1, %ymm2
0xc5,0xfc,0x11,0x08 = vmovups %ymm1, (%eax)
0xc5,0xfd,0x10,0x10 = vmovupd (%eax), %ymm2
0xc5,0xfd,0x10,0xd1 = vmovupd %ymm1, %ymm2
0xc5,0xfd,0x11,0x08 = vmovupd %ymm1, (%eax)
0xc5,0xec,0x15,0xe1 = vunpckhps %ymm1, %ymm2, %ymm4
0xc5,0xed,0x15,0xe1 = vunpckhpd %ymm1, %ymm2, %ymm4
0xc5,0xec,0x14,0xe1 = vunpcklps %ymm1, %ymm2, %ymm4
0xc5,0xed,0x14,0xe1 = vunpcklpd %ymm1, %ymm2, %ymm4
0xc5,0xec,0x15,0x6c,0xcb,0xfc = vunpckhps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x15,0x6c,0xcb,0xfc = vunpckhpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xec,0x14,0x6c,0xcb,0xfc = vunpcklps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x14,0x6c,0xcb,0xfc = vunpcklpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xfd,0xe7,0x08 = vmovntdq %ymm1, (%eax)
0xc5,0xfd,0x2b,0x08 = vmovntpd %ymm1, (%eax)
0xc5,0xfc,0x2b,0x08 = vmovntps %ymm1, (%eax)
0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
0xc5,0xdc,0x5f,0xf2 = vmaxps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x5f,0xf2 = vmaxpd %ymm2, %ymm4, %ymm6
0xc5,0xdc,0x5d,0xf2 = vminps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x5d,0xf2 = vminpd %ymm2, %ymm4, %ymm6
0xc5,0xdc,0x5c,0xf2 = vsubps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x5c,0xf2 = vsubpd %ymm2, %ymm4, %ymm6
0xc5,0xdc,0x5e,0xf2 = vdivps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x5e,0xf2 = vdivpd %ymm2, %ymm4, %ymm6
0xc5,0xdc,0x58,0xf2 = vaddps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x58,0xf2 = vaddpd %ymm2, %ymm4, %ymm6
0xc5,0xdc,0x59,0xf2 = vmulps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x59,0xf2 = vmulpd %ymm2, %ymm4, %ymm6
0xc5,0xdc,0x5f,0x30 = vmaxps (%eax), %ymm4, %ymm6
0xc5,0xdd,0x5f,0x30 = vmaxpd (%eax), %ymm4, %ymm6
0xc5,0xdc,0x5d,0x30 = vminps (%eax), %ymm4, %ymm6
0xc5,0xdd,0x5d,0x30 = vminpd (%eax), %ymm4, %ymm6
0xc5,0xdc,0x5c,0x30 = vsubps (%eax), %ymm4, %ymm6
0xc5,0xdd,0x5c,0x30 = vsubpd (%eax), %ymm4, %ymm6
0xc5,0xdc,0x5e,0x30 = vdivps (%eax), %ymm4, %ymm6
0xc5,0xdd,0x5e,0x30 = vdivpd (%eax), %ymm4, %ymm6
0xc5,0xdc,0x58,0x30 = vaddps (%eax), %ymm4, %ymm6
0xc5,0xdd,0x58,0x30 = vaddpd (%eax), %ymm4, %ymm6
0xc5,0xdc,0x59,0x30 = vmulps (%eax), %ymm4, %ymm6
0xc5,0xdd,0x59,0x30 = vmulpd (%eax), %ymm4, %ymm6
0xc5,0xfd,0x51,0xd1 = vsqrtpd %ymm1, %ymm2
0xc5,0xfd,0x51,0x10 = vsqrtpd (%eax), %ymm2
0xc5,0xfc,0x51,0xd1 = vsqrtps %ymm1, %ymm2
0xc5,0xfc,0x51,0x10 = vsqrtps (%eax), %ymm2
0xc5,0xfc,0x52,0xd1 = vrsqrtps %ymm1, %ymm2
0xc5,0xfc,0x52,0x10 = vrsqrtps (%eax), %ymm2
0xc5,0xfc,0x53,0xd1 = vrcpps %ymm1, %ymm2
0xc5,0xfc,0x53,0x10 = vrcpps (%eax), %ymm2
0xc5,0xdc,0x54,0xf2 = vandps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x54,0xf2 = vandpd %ymm2, %ymm4, %ymm6
0xc5,0xec,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xdc,0x56,0xf2 = vorps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x56,0xf2 = vorpd %ymm2, %ymm4, %ymm6
0xc5,0xec,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xdc,0x57,0xf2 = vxorps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x57,0xf2 = vxorpd %ymm2, %ymm4, %ymm6
0xc5,0xec,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xdc,0x55,0xf2 = vandnps %ymm2, %ymm4, %ymm6
0xc5,0xdd,0x55,0xf2 = vandnpd %ymm2, %ymm4, %ymm6
0xc5,0xec,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xfc,0x5a,0xd3 = vcvtps2pd %xmm3, %ymm2
0xc5,0xfc,0x5a,0x10 = vcvtps2pd (%eax), %ymm2
0xc5,0xfe,0xe6,0xd3 = vcvtdq2pd %xmm3, %ymm2
0xc5,0xfe,0xe6,0x10 = vcvtdq2pd (%eax), %ymm2
0xc5,0xfc,0x5b,0xea = vcvtdq2ps %ymm2, %ymm5
0xc5,0xfc,0x5b,0x10 = vcvtdq2ps (%eax), %ymm2
0xc5,0xfd,0x5b,0xea = vcvtps2dq %ymm2, %ymm5
0xc5,0xfd,0x5b,0x28 = vcvtps2dq (%eax), %ymm5
0xc5,0xfe,0x5b,0xea = vcvttps2dq %ymm2, %ymm5
0xc5,0xfe,0x5b,0x28 = vcvttps2dq (%eax), %ymm5
0xc5,0xf9,0xe6,0xe9 = vcvttpd2dq %xmm1, %xmm5
0xc5,0xfd,0xe6,0xea = vcvttpd2dq %ymm2, %xmm5
0xc5,0xf9,0xe6,0xe9 = vcvttpd2dqx %xmm1, %xmm5
0xc5,0xf9,0xe6,0x08 = vcvttpd2dqx (%eax), %xmm1
0xc5,0xfd,0xe6,0xca = vcvttpd2dqy %ymm2, %xmm1
0xc5,0xfd,0xe6,0x08 = vcvttpd2dqy (%eax), %xmm1
0xc5,0xfd,0x5a,0xea = vcvtpd2ps %ymm2, %xmm5
0xc5,0xf9,0x5a,0xe9 = vcvtpd2psx %xmm1, %xmm5
0xc5,0xf9,0x5a,0x08 = vcvtpd2psx (%eax), %xmm1
0xc5,0xfd,0x5a,0xca = vcvtpd2psy %ymm2, %xmm1
0xc5,0xfd,0x5a,0x08 = vcvtpd2psy (%eax), %xmm1
0xc5,0xff,0xe6,0xea = vcvtpd2dq %ymm2, %xmm5
0xc5,0xff,0xe6,0xca = vcvtpd2dqy %ymm2, %xmm1
0xc5,0xff,0xe6,0x08 = vcvtpd2dqy (%eax), %xmm1
0xc5,0xfb,0xe6,0xe9 = vcvtpd2dqx %xmm1, %xmm5
0xc5,0xfb,0xe6,0x08 = vcvtpd2dqx (%eax), %xmm1
0xc5,0xec,0xc2,0xd9,0x00 = vcmpeqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x02 = vcmpleps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x01 = vcmpltps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x04 = vcmpneqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x06 = vcmpnleps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x05 = vcmpnltps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x07 = vcmpordps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x03 = vcmpunordps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xcc,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx,%ecx,8), %ymm6, %ymm2
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x00 = vcmpeqpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x02 = vcmplepd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x01 = vcmpltpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x04 = vcmpneqpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x06 = vcmpnlepd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x05 = vcmpnltpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x07 = vcmpordpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x03 = vcmpunordpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xcd,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx,%ecx,8), %ymm6, %ymm2
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x08 = vcmpeq_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x09 = vcmpngeps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0a = vcmpngtps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0b = vcmpfalseps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0c = vcmpneq_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0d = vcmpgeps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0e = vcmpgtps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0f = vcmptrueps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x10 = vcmpeq_osps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x11 = vcmplt_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x12 = vcmple_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x13 = vcmpunord_sps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x14 = vcmpneq_usps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x15 = vcmpnlt_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x16 = vcmpnle_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x17 = vcmpord_sps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x18 = vcmpeq_usps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x19 = vcmpnge_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1a = vcmpngt_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1b = vcmpfalse_osps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1c = vcmpneq_osps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1d = vcmpge_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1e = vcmpgt_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1f = vcmptrue_usps %ymm1, %ymm2, %ymm3
0xc5,0xef,0xd0,0xd9 = vaddsubps %ymm1, %ymm2, %ymm3
0xc5,0xf7,0xd0,0x10 = vaddsubps (%eax), %ymm1, %ymm2
0xc5,0xed,0xd0,0xd9 = vaddsubpd %ymm1, %ymm2, %ymm3
0xc5,0xf5,0xd0,0x10 = vaddsubpd (%eax), %ymm1, %ymm2
0xc5,0xef,0x7c,0xd9 = vhaddps %ymm1, %ymm2, %ymm3
0xc5,0xef,0x7c,0x18 = vhaddps (%eax), %ymm2, %ymm3
0xc5,0xed,0x7c,0xd9 = vhaddpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0x7c,0x18 = vhaddpd (%eax), %ymm2, %ymm3
0xc5,0xef,0x7d,0xd9 = vhsubps %ymm1, %ymm2, %ymm3
0xc5,0xef,0x7d,0x18 = vhsubps (%eax), %ymm2, %ymm3
0xc5,0xed,0x7d,0xd9 = vhsubpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0x7d,0x18 = vhsubpd (%eax), %ymm2, %ymm3
0xc4,0xe3,0x55,0x0c,0xca,0x03 = vblendps $3, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x0c,0x08,0x03 = vblendps $3, (%eax), %ymm5, %ymm1
0xc4,0xe3,0x55,0x0d,0xca,0x03 = vblendpd $3, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x0d,0x08,0x03 = vblendpd $3, (%eax), %ymm5, %ymm1
0xc4,0xe3,0x55,0x40,0xca,0x03 = vdpps $3, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x40,0x08,0x03 = vdpps $3, (%eax), %ymm5, %ymm1
0xc4,0xe2,0x7d,0x1a,0x10 = vbroadcastf128 (%eax), %ymm2
0xc4,0xe2,0x7d,0x19,0x10 = vbroadcastsd (%eax), %ymm2
0xc4,0xe2,0x79,0x18,0x10 = vbroadcastss (%eax), %xmm2
0xc4,0xe2,0x7d,0x18,0x10 = vbroadcastss (%eax), %ymm2
0xc4,0xe3,0x6d,0x18,0xea,0x07 = vinsertf128 $7, %xmm2, %ymm2, %ymm5
0xc4,0xe3,0x6d,0x18,0x28,0x07 = vinsertf128 $7, (%eax), %ymm2, %ymm5
0xc4,0xe3,0x7d,0x19,0xd2,0x07 = vextractf128 $7, %ymm2, %xmm2
0xc4,0xe3,0x7d,0x19,0x10,0x07 = vextractf128 $7, %ymm2, (%eax)
0xc4,0xe2,0x51,0x2f,0x10 = vmaskmovpd %xmm2, %xmm5, (%eax)
0xc4,0xe2,0x55,0x2f,0x10 = vmaskmovpd %ymm2, %ymm5, (%eax)
0xc4,0xe2,0x69,0x2d,0x28 = vmaskmovpd (%eax), %xmm2, %xmm5
0xc4,0xe2,0x6d,0x2d,0x28 = vmaskmovpd (%eax), %ymm2, %ymm5
0xc4,0xe2,0x51,0x2e,0x10 = vmaskmovps %xmm2, %xmm5, (%eax)
0xc4,0xe2,0x55,0x2e,0x10 = vmaskmovps %ymm2, %ymm5, (%eax)
0xc4,0xe2,0x69,0x2c,0x28 = vmaskmovps (%eax), %xmm2, %xmm5
0xc4,0xe2,0x6d,0x2c,0x28 = vmaskmovps (%eax), %ymm2, %ymm5
0xc4,0xe3,0x79,0x04,0xe9,0x07 = vpermilps $7, %xmm1, %xmm5
0xc4,0xe3,0x7d,0x04,0xcd,0x07 = vpermilps $7, %ymm5, %ymm1
0xc4,0xe3,0x79,0x04,0x28,0x07 = vpermilps $7, (%eax), %xmm5
0xc4,0xe3,0x7d,0x04,0x28,0x07 = vpermilps $7, (%eax), %ymm5
0xc4,0xe2,0x51,0x0c,0xc9 = vpermilps %xmm1, %xmm5, %xmm1
0xc4,0xe2,0x55,0x0c,0xc9 = vpermilps %ymm1, %ymm5, %ymm1
0xc4,0xe2,0x51,0x0c,0x18 = vpermilps (%eax), %xmm5, %xmm3
0xc4,0xe2,0x55,0x0c,0x08 = vpermilps (%eax), %ymm5, %ymm1
0xc4,0xe3,0x79,0x05,0xe9,0x07 = vpermilpd $7, %xmm1, %xmm5
0xc4,0xe3,0x7d,0x05,0xcd,0x07 = vpermilpd $7, %ymm5, %ymm1
0xc4,0xe3,0x79,0x05,0x28,0x07 = vpermilpd $7, (%eax), %xmm5
0xc4,0xe3,0x7d,0x05,0x28,0x07 = vpermilpd $7, (%eax), %ymm5
0xc4,0xe2,0x51,0x0d,0xc9 = vpermilpd %xmm1, %xmm5, %xmm1
0xc4,0xe2,0x55,0x0d,0xc9 = vpermilpd %ymm1, %ymm5, %ymm1
0xc4,0xe2,0x51,0x0d,0x18 = vpermilpd (%eax), %xmm5, %xmm3
0xc4,0xe2,0x55,0x0d,0x08 = vpermilpd (%eax), %ymm5, %ymm1
0xc4,0xe3,0x55,0x06,0xca,0x07 = vperm2f128 $7, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x06,0x08,0x07 = vperm2f128 $7, (%eax), %ymm5, %ymm1
0xc5,0xfc,0x77 = vzeroall
0xc5,0xf8,0x77 = vzeroupper
0xc5,0xfb,0x2d,0xcc = vcvtsd2sil %xmm4, %ecx
0xc5,0xfb,0x2d,0x09 = vcvtsd2sil (%ecx), %ecx
0xc5,0xfb,0x2d,0xcc = vcvtsd2si %xmm4, %ecx
0xc5,0xfb,0x2d,0x09 = vcvtsd2si (%ecx), %ecx
0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sdl (%ebp), %xmm0, %xmm7
0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sdl (%esp), %xmm0, %xmm7
0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sd (%ebp), %xmm0, %xmm7
0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sd (%esp), %xmm0, %xmm7
0xc5,0xff,0xf0,0x10 = vlddqu (%eax), %ymm2
0xc5,0xff,0x12,0xea = vmovddup %ymm2, %ymm5
0xc5,0xff,0x12,0x10 = vmovddup (%eax), %ymm2
0xc5,0xfd,0x6f,0xea = vmovdqa %ymm2, %ymm5
0xc5,0xfd,0x7f,0x10 = vmovdqa %ymm2, (%eax)
0xc5,0xfd,0x6f,0x10 = vmovdqa (%eax), %ymm2
0xc5,0xfe,0x6f,0xea = vmovdqu %ymm2, %ymm5
0xc5,0xfe,0x7f,0x10 = vmovdqu %ymm2, (%eax)
0xc5,0xfe,0x6f,0x10 = vmovdqu (%eax), %ymm2
0xc5,0xfe,0x16,0xea = vmovshdup %ymm2, %ymm5
0xc5,0xfe,0x16,0x10 = vmovshdup (%eax), %ymm2
0xc5,0xfe,0x12,0xea = vmovsldup %ymm2, %ymm5
0xc5,0xfe,0x12,0x10 = vmovsldup (%eax), %ymm2
0xc4,0xe2,0x7d,0x17,0xea = vptest %ymm2, %ymm5
0xc4,0xe2,0x7d,0x17,0x10 = vptest (%eax), %ymm2
0xc4,0xe3,0x7d,0x09,0xcd,0x07 = vroundpd $7, %ymm5, %ymm1
0xc4,0xe3,0x7d,0x09,0x28,0x07 = vroundpd $7, (%eax), %ymm5
0xc4,0xe3,0x7d,0x08,0xcd,0x07 = vroundps $7, %ymm5, %ymm1
0xc4,0xe3,0x7d,0x08,0x28,0x07 = vroundps $7, (%eax), %ymm5
0xc5,0xd5,0xc6,0xca,0x07 = vshufpd $7, %ymm2, %ymm5, %ymm1
0xc5,0xd5,0xc6,0x08,0x07 = vshufpd $7, (%eax), %ymm5, %ymm1
0xc5,0xd4,0xc6,0xca,0x07 = vshufps $7, %ymm2, %ymm5, %ymm1
0xc5,0xd4,0xc6,0x08,0x07 = vshufps $7, (%eax), %ymm5, %ymm1
0xc4,0xe2,0x79,0x0f,0xea = vtestpd %xmm2, %xmm5
0xc4,0xe2,0x7d,0x0f,0xea = vtestpd %ymm2, %ymm5
0xc4,0xe2,0x79,0x0f,0x10 = vtestpd (%eax), %xmm2
0xc4,0xe2,0x7d,0x0f,0x10 = vtestpd (%eax), %ymm2
0xc4,0xe2,0x79,0x0e,0xea = vtestps %xmm2, %xmm5
0xc4,0xe2,0x7d,0x0e,0xea = vtestps %ymm2, %ymm5
0xc4,0xe2,0x79,0x0e,0x10 = vtestps (%eax), %xmm2
0xc4,0xe2,0x7d,0x0e,0x10 = vtestps (%eax), %ymm2
0xc4,0xe3,0x75,0x4b,0x94,0x20,0xad,0xde,0x00,0x00,0x00 = vblendvpd %ymm0, 0xdead(%eax,%eiz), %ymm1, %ymm2
0xc4,0xe3,0x51,0x44,0xca,0x11 = vpclmulhqhqdq %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x11 = vpclmulhqhqdq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x44,0xca,0x01 = vpclmulhqlqdq %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x01 = vpclmulhqlqdq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x44,0xca,0x10 = vpclmullqhqdq %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x10 = vpclmullqhqdq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x44,0xca,0x00 = vpclmullqlqdq %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x00 = vpclmullqlqdq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x44,0xca,0x11 = vpclmulqdq $17, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x11 = vpclmulqdq $17, (%eax), %xmm5, %xmm3