Lines Matching refs:ib

13269 /* 66 0F 71 /2 ib = PSRLW by immediate */
13276 /* 66 0F 71 /4 ib = PSRAW by immediate */
13283 /* 66 0F 71 /6 ib = PSLLW by immediate */
13293 /* 66 0F 72 /2 ib = PSRLD by immediate */
13300 /* 66 0F 72 /4 ib = PSRAD by immediate */
13307 /* 66 0F 72 /6 ib = PSLLD by immediate */
13317 /* 66 0F 73 /3 ib = PSRLDQ by immediate */
13331 /* 66 0F 73 /7 ib = PSLLDQ by immediate */
13346 /* 66 0F 73 /2 ib = PSRLQ by immediate */
13353 /* 66 0F 73 /6 ib = PSLLQ by immediate */
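The 0F 71/72/73 entries above all share one decoding scheme: the /digit is the ModRM.reg field acting as an opcode extension that selects the shift operation, and the trailing ib byte is the shift count. A minimal standalone sketch of that dispatch (not the translator's own code; grp12_op and the example bytes are illustrative):

#include <stdint.h>
#include <stdio.h>

/* Map the /digit of a Group-12 ModRM byte (66 0F 71 /digit ib) to the
   operation it selects; other digits are undefined in this group. */
static const char *grp12_op(uint8_t modrm)
{
   switch ((modrm >> 3) & 7) {      /* ModRM.reg == the /digit */
      case 2: return "psrlw";       /* /2: shift right logical */
      case 4: return "psraw";       /* /4: shift right arithmetic */
      case 6: return "psllw";       /* /6: shift left logical */
      default: return "(invalid)";
   }
}

int main(void)
{
   /* 66 0F 71 D1 05  ==  psrlw xmm1, 5 : ModRM 0xD1 has reg=2, rm=1 */
   uint8_t modrm = 0xD1, ib = 0x05;
   printf("%s xmm%u, %u\n", grp12_op(modrm),
          (unsigned)(modrm & 7), (unsigned)ib);
   return 0;
}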
13887 /* 0F C6 /r ib = SHUFPS -- shuffle packed F32s */
13912 /* 66 0F C6 /r ib = SHUFPD -- shuffle packed F64s */
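For SHUFPS the trailing imm8 is four 2-bit lane selectors: the low two results are drawn from the destination operand, the high two from the source operand. A small reference model under that reading (shufps_model is an illustrative name, not taken from the file):

#include <stdint.h>
#include <stdio.h>

/* dst[0..1] come from dst, dst[2..3] from src; each 2-bit imm8 field
   names the source lane. */
static void shufps_model(float dst[4], const float src[4], uint8_t imm8)
{
   float r0 = dst[ imm8       & 3];
   float r1 = dst[(imm8 >> 2) & 3];
   float r2 = src[(imm8 >> 4) & 3];
   float r3 = src[(imm8 >> 6) & 3];
   dst[0] = r0; dst[1] = r1; dst[2] = r2; dst[3] = r3;
}

int main(void)
{
   float a[4] = {0, 1, 2, 3}, b[4] = {4, 5, 6, 7};
   shufps_model(a, b, 0x1B);                          /* fields 3,2,1,0 */
   printf("%g %g %g %g\n", a[0], a[1], a[2], a[3]);   /* 3 2 5 4 */
   return 0;
}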
16201 UChar ib = getUChar(delta);
16202 rIS4 = (ib >> 4) & 0xF;
16210 UChar ib = getUChar(delta);
16211 rIS4 = (ib >> 4) & 0xF;
16240 UChar ib = getUChar(delta);
16241 rIS4 = (ib >> 4) & 0xF;
16249 UChar ib = getUChar(delta);
16250 rIS4 = (ib >> 4) & 0xF;
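These fragments read the trailing immediate and keep bits 7:4 as rIS4, the "is4" register number used by 4-operand VEX instructions such as VBLENDVPS. A minimal standalone sketch of just that extraction (the values are illustrative):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
   uint8_t  ib   = 0xC0;              /* trailing immediate byte */
   unsigned rIS4 = (ib >> 4) & 0xF;   /* fourth register, bits 7:4 */
   printf("is4 register = xmm%u\n", rIS4);   /* -> xmm12 */
   return 0;
}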
18659 /* 66 0F 3A 08 /r ib = ROUNDPS imm8, xmm2/m128, xmm1 */
18729 /* 66 0F 3A 09 /r ib = ROUNDPD imm8, xmm2/m128, xmm1 */
18784 /* 66 0F 3A 0A /r ib = ROUNDSS imm8, xmm2/m32, xmm1
18785 66 0F 3A 0B /r ib = ROUNDSD imm8, xmm2/m64, xmm1
18837 /* 66 0F 3A 0C /r ib = BLENDPS xmm1, xmm2/m128, imm8
18874 /* 66 0F 3A 0D /r ib = BLENDPD xmm1, xmm2/m128, imm8
18910 /* 66 0F 3A 0E /r ib = PBLENDW xmm1, xmm2/m128, imm8
18947 /* 66 0F 3A 14 /r ib = PEXTRB r/m8, xmm, imm8
18957 /* 66 0F 3A 15 /r ib = PEXTRW r/m16, xmm, imm8
18967 /* 66 no-REX.W 0F 3A 16 /r ib = PEXTRD reg/mem32, xmm2, imm8
18976 /* 66 REX.W 0F 3A 16 /r ib = PEXTRQ reg/mem64, xmm2, imm8
18988 /* 66 0F 3A 17 /r ib = EXTRACTPS reg/mem32, xmm2, imm8 Extract
19000 /* 66 0F 3A 20 /r ib = PINSRB xmm1, r32/m8, imm8
19031 /* 66 0F 3A 21 /r ib = INSERTPS imm8, xmm2/m32, xmm1
19070 /* 66 no-REX.W 0F 3A 22 /r ib = PINSRD xmm1, r/m32, imm8
19101 /* 66 REX.W 0F 3A 22 /r ib = PINSRQ xmm1, r/m64, imm8
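In the PEXTRx/PINSRx entries above, the imm8 is a lane index, masked down to however many lanes the element width allows. A hedged model for the PEXTRW case (pextrw_model is an illustrative name, not from the file):

#include <stdint.h>
#include <stdio.h>

/* PEXTRW: imm8 picks one of the eight 16-bit lanes; only bits 2:0 count. */
static uint16_t pextrw_model(const uint16_t xmm[8], uint8_t imm8)
{
   return xmm[imm8 & 7];
}

int main(void)
{
   uint16_t x[8] = {10, 11, 12, 13, 14, 15, 16, 17};
   printf("%u\n", (unsigned)pextrw_model(x, 0x0A));   /* 0x0A & 7 = 2 -> 12 */
   return 0;
}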
19135 /* 66 0F 3A 40 /r ib = DPPS xmm1, xmm2/m128, imm8
19168 /* 66 0F 3A 41 /r ib = DPPD xmm1, xmm2/m128, imm8
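For DPPS/DPPD the imm8 is two masks: the high nibble selects which per-lane products enter the dot product, the low nibble selects which result lanes receive the sum. A sketch of the single-precision case under that reading (dpps_model is illustrative):

#include <stdint.h>
#include <stdio.h>

/* High nibble of imm8: which dst[i]*src[i] products are summed.
   Low nibble: which destination lanes receive the sum (others get 0). */
static void dpps_model(float dst[4], const float src[4], uint8_t imm8)
{
   float sum = 0.0f;
   for (int i = 0; i < 4; i++)
      if (imm8 & (0x10u << i)) sum += dst[i] * src[i];
   for (int i = 0; i < 4; i++)
      dst[i] = (imm8 & (1u << i)) ? sum : 0.0f;
}

int main(void)
{
   float a[4] = {1, 2, 3, 4}, b[4] = {5, 6, 7, 8};
   dpps_model(a, b, 0xF1);            /* sum every product, write lane 0 */
   printf("%g %g %g %g\n", a[0], a[1], a[2], a[3]);    /* 70 0 0 0 */
   return 0;
}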
19201 /* 66 0F 3A 42 /r ib = MPSADBW xmm1, xmm2/m128, imm8
19236 /* 66 0F 3A 44 /r ib = PCLMULQDQ xmm1, xmm2/m128, imm8
19277 /* 66 0F 3A 63 /r ib = PCMPISTRI imm8, xmm2/m128, xmm1
19278 66 0F 3A 62 /r ib = PCMPISTRM imm8, xmm2/m128, xmm1
19279 66 0F 3A 61 /r ib = PCMPESTRI imm8, xmm2/m128, xmm1
19280 66 0F 3A 60 /r ib = PCMPESTRM imm8, xmm2/m128, xmm1
19293 /* 66 0F 3A DF /r ib = AESKEYGENASSIST imm8, xmm2/m128, xmm1 */
25245 /* VPSHUFD imm8, xmm2/m128, xmm1 = VEX.128.66.0F.WIG 70 /r ib */
25250 /* VPSHUFD imm8, ymm2/m256, ymm1 = VEX.256.66.0F.WIG 70 /r ib */
25255 /* VPSHUFLW imm8, xmm2/m128, xmm1 = VEX.128.F2.0F.WIG 70 /r ib */
25261 /* VPSHUFLW imm8, ymm2/m256, ymm1 = VEX.256.F2.0F.WIG 70 /r ib */
25266 /* VPSHUFHW imm8, xmm2/m128, xmm1 = VEX.128.F3.0F.WIG 70 /r ib */
25272 /* VPSHUFHW imm8, ymm2/m256, ymm1 = VEX.256.F3.0F.WIG 70 /r ib */
25280 /* VPSRLW imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 71 /2 ib */
25281 /* VPSRAW imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 71 /4 ib */
25282 /* VPSLLW imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 71 /6 ib */
25306 /* VPSRLW imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 71 /2 ib */
25307 /* VPSRAW imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 71 /4 ib */
25308 /* VPSLLW imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 71 /6 ib */
25335 /* VPSRLD imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 72 /2 ib */
25336 /* VPSRAD imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 72 /4 ib */
25337 /* VPSLLD imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 72 /6 ib */
25361 /* VPSRLD imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 72 /2 ib */
25362 /* VPSRAD imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 72 /4 ib */
25363 /* VPSLLD imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 72 /6 ib */
25390 /* VPSRLDQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /3 ib */
25391 /* VPSLLDQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /7 ib */
25392 /* VPSRLQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /2 ib */
25393 /* VPSLLQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /6 ib */
25431 /* VPSRLDQ imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 73 /3 ib */
25432 /* VPSLLDQ imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 73 /7 ib */
25433 /* VPSRLQ imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 73 /2 ib */
25434 /* VPSLLQ imm8, ymm2, ymm1 = VEX.NDD.256.66.0F.WIG 73 /6 ib */
25829 /* VCMPSD = VEX.NDS.LIG.F2.0F.WIG C2 /r ib */
25839 /* VCMPSS = VEX.NDS.LIG.F3.0F.WIG C2 /r ib */
25849 /* VCMPPD = VEX.NDS.128.66.0F.WIG C2 /r ib */
25859 /* VCMPPD = VEX.NDS.256.66.0F.WIG C2 /r ib */
25868 /* VCMPPS = VEX.NDS.128.0F.WIG C2 /r ib */
25878 /* VCMPPS = VEX.NDS.256.0F.WIG C2 /r ib */
25889 /* VPINSRW r32/m16, xmm2, xmm1 = VEX.NDS.128.66.0F.WIG C4 /r ib */
25923 /* VPEXTRW imm8, xmm1, reg32 = VEX.128.66.0F.W0 C5 /r ib */
25936 /* VSHUFPS = VEX.NDS.128.0F.WIG C6 /r ib */
25966 /* VSHUFPS = VEX.NDS.256.0F.WIG C6 /r ib */
25996 /* VSHUFPD = VEX.NDS.128.66.0F.WIG C6 /r ib */
26026 /* VSHUFPD = VEX.NDS.256.66.0F.WIG C6 /r ib */
29537 /* VPERMQ imm8, ymm2/m256, ymm1 = VEX.256.66.0F3A.W1 00 /r ib */
29538 /* VPERMPD imm8, ymm2/m256, ymm1 = VEX.256.66.0F3A.W1 01 /r ib */
29577 /* VPBLENDD imm8, xmm3/m128, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W0 02 /r ib */
29617 /* VPBLENDD imm8, ymm3/m256, ymm2, ymm1 = VEX.NDS.256.66.0F3A.W0 02 /r ib */
29661 /* VPERMILPS imm8, ymm2/m256, ymm1 = VEX.256.66.0F3A.WIG 04 /r ib */
29691 /* VPERMILPS imm8, xmm2/m128, xmm1 = VEX.128.66.0F3A.WIG 04 /r ib */
29719 /* VPERMILPD imm8, xmm2/m128, xmm1 = VEX.128.66.0F3A.WIG 05 /r ib */
29752 /* VPERMILPD imm8, ymm2/m256, ymm1 = VEX.256.66.0F3A.WIG 05 /r ib */
29789 /* VPERM2F128 imm8, ymm3/m256, ymm2, ymm1 = VEX.NDS.256.66.0F3A.W0 06 /r ib */
29836 /* VROUNDPS = VEX.NDS.128.66.0F3A.WIG 08 ib */
29884 /* VROUNDPS = VEX.NDS.256.66.0F3A.WIG 08 ib */
29941 /* VROUNDPD = VEX.NDS.128.66.0F3A.WIG 09 ib */
29985 /* VROUNDPD = VEX.NDS.256.66.0F3A.WIG 09 ib */
30035 /* VROUNDSS = VEX.NDS.128.66.0F3A.WIG 0A ib */
30037 /* VROUNDSD = VEX.NDS.128.66.0F3A.WIG 0B ib */
30092 /* VBLENDPS = VEX.NDS.256.66.0F3A.WIG 0C /r ib */
30123 /* VBLENDPS = VEX.NDS.128.66.0F3A.WIG 0C /r ib */
30157 /* VBLENDPD = VEX.NDS.256.66.0F3A.WIG 0D /r ib */
30188 /* VBLENDPD = VEX.NDS.128.66.0F3A.WIG 0D /r ib */
30222 /* VPBLENDW = VEX.NDS.128.66.0F3A.WIG 0E /r ib */
30253 /* VPBLENDW = VEX.NDS.256.66.0F3A.WIG 0E /r ib */
30292 /* VPALIGNR = VEX.NDS.128.66.0F3A.WIG 0F /r ib */
30325 /* VPALIGNR = VEX.NDS.256.66.0F3A.WIG 0F /r ib */
30366 /* VPEXTRB imm8, xmm2, reg/m8 = VEX.128.66.0F3A.W0 14 /r ib */
30376 /* VPEXTRW = VEX.128.66.0F3A.W0 15 /r ib */
30386 /* VPEXTRD = VEX.128.66.0F3A.W0 16 /r ib */
30392 /* VPEXTRQ = VEX.128.66.0F3A.W1 16 /r ib */
30401 /* VEXTRACTPS imm8, xmm1, r32/m32 = VEX.128.66.0F3A.WIG 17 /r ib */
30411 /* VINSERTF128 = VEX.NDS.256.66.0F3A.W0 18 /r ib */
30415 UInt ib = 0;
30423 ib = getUChar(delta);
30425 ib, nameXMMReg(rE), nameYMMReg(rV), nameYMMReg(rG));
30430 ib = getUChar(delta);
30432 ib, dis_buf, nameYMMReg(rV), nameYMMReg(rG));
30437 putYMMRegLane128(rG, ib & 1, mkexpr(t128));
30446 /* VEXTRACTF128 = VEX.256.66.0F3A.W0 19 /r ib */
30450 UInt ib = 0;
30456 ib = getUChar(delta);
30457 assign(t128, getYMMRegLane128(rS, ib & 1));
30460 ib, nameXMMReg(rS), nameYMMReg(rD));
30464 ib = getUChar(delta);
30465 assign(t128, getYMMRegLane128(rS, ib & 1));
30468 ib, nameYMMReg(rS), dis_buf);
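The VINSERTF128/VEXTRACTF128 fragments above use only ib & 1: the immediate's low bit picks which 128-bit lane of the YMM register is written or read (the VINSERTI128/VEXTRACTI128 fragments further down do the same). A small standalone model of that lane selection, not the IR-building code itself:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

typedef struct { uint64_t lane[2][2]; } Ymm;   /* two 128-bit lanes */

/* Only ib & 1 matters: it names the 128-bit lane that is overwritten. */
static void insertf128_model(Ymm *dst, const uint64_t src[2], uint8_t ib)
{
   memcpy(dst->lane[ib & 1], src, 16);
}

/* Likewise for extraction: ib & 1 names the lane that is read out. */
static void extractf128_model(uint64_t dst[2], const Ymm *src, uint8_t ib)
{
   memcpy(dst, src->lane[ib & 1], 16);
}

int main(void)
{
   Ymm      y = {{{1, 2}, {3, 4}}};
   uint64_t x[2] = {0xAA, 0xBB}, out[2];
   insertf128_model(&y, x, 1);        /* replace the upper lane */
   extractf128_model(out, &y, 1);
   printf("%llx %llx\n", (unsigned long long)out[0],
                         (unsigned long long)out[1]);   /* aa bb */
   return 0;
}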
30477 /* VPINSRB r32/m8, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W0 20 /r ib */
30513 /* VINSERTPS = VEX.NDS.128.66.0F3A.WIG 21 /r ib */
30552 /* VPINSRD r32/m32, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W0 22 /r ib */
30584 /* VPINSRQ r64/m64, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W1 22 /r ib */
30621 /* VINSERTI128 = VEX.NDS.256.66.0F3A.W0 38 /r ib */
30625 UInt ib = 0;
30633 ib = getUChar(delta);
30635 ib, nameXMMReg(rE), nameYMMReg(rV), nameYMMReg(rG));
30640 ib = getUChar(delta);
30642 ib, dis_buf, nameYMMReg(rV), nameYMMReg(rG));
30647 putYMMRegLane128(rG, ib & 1, mkexpr(t128));
30656 /* VEXTRACTI128 = VEX.256.66.0F3A.W0 39 /r ib */
30660 UInt ib = 0;
30666 ib = getUChar(delta);
30667 assign(t128, getYMMRegLane128(rS, ib & 1));
30670 ib, nameXMMReg(rS), nameYMMReg(rD));
30674 ib = getUChar(delta);
30675 assign(t128, getYMMRegLane128(rS, ib & 1));
30678 ib, nameYMMReg(rS), dis_buf);
30687 /* VDPPS imm8, xmm3/m128, xmm2, xmm1 = VEX.NDS.128.66.0F3A.WIG 40 /r ib */
30717 /* VDPPS imm8, ymm3/m256, ymm2, ymm1 = VEX.NDS.256.66.0F3A.WIG 40 /r ib */
30755 /* VDPPD imm8, xmm3/m128, xmm2, xmm1 = VEX.NDS.128.66.0F3A.WIG 41 /r ib */
30789 /* VMPSADBW = VEX.NDS.128.66.0F3A.WIG 42 /r ib */
30824 /* VMPSADBW = VEX.NDS.256.66.0F3A.WIG 42 /r ib */
30867 /* VPCLMULQDQ = VEX.NDS.128.66.0F3A.WIG 44 /r ib */
30868 /* 66 0F 3A 44 /r ib = PCLMULQDQ xmm1, xmm2/m128, imm8
30906 /* VPERM2I128 imm8, ymm3/m256, ymm2, ymm1 = VEX.NDS.256.66.0F3A.W0 46 /r ib */
31018 /* VEX.128.66.0F3A.WIG 63 /r ib = VPCMPISTRI imm8, xmm2/m128, xmm1
31019 VEX.128.66.0F3A.WIG 62 /r ib = VPCMPISTRM imm8, xmm2/m128, xmm1
31020 VEX.128.66.0F3A.WIG 61 /r ib = VPCMPESTRI imm8, xmm2/m128, xmm1
31021 VEX.128.66.0F3A.WIG 60 /r ib = VPCMPESTRM imm8, xmm2/m128, xmm1