Lines Matching refs:x13

273 __ Mvn(x13, Operand(x2, SXTH, 3));
293 ASSERT_EQUAL_64(0xffffffffffff0007, x13);
345 __ Mov(x13, 0x0000000000001234);
375 ASSERT_EQUAL_64(0x0000000000001234, x13);
437 __ Mov(x24, Operand(x13, SXTB, 1));
439 __ Mov(x26, Operand(x13, SXTH, 3));
460 ASSERT_EQUAL_64(0x00001ffe, x13);
528 __ Orr(x13, x0, Operand(x1, SXTX, 3));
540 ASSERT_EQUAL_64(0x0000000400040401, x13);
617 __ Orn(x13, x0, Operand(x1, SXTX, 3));
629 ASSERT_EQUAL_64(0xfffffffbfffbfbf7, x13);
684 __ And(x13, x0, Operand(x1, SXTX, 3));
696 ASSERT_EQUAL_64(0x0000000400040408, x13);
822 __ Bic(x13, x0, Operand(x1, SXTX, 3));
834 ASSERT_EQUAL_64(0xfffffffbfffbfbf7, x13);
946 __ Eor(x13, x0, Operand(x1, SXTX, 3));
958 ASSERT_EQUAL_64(0x1111111511151519, x13);
1013 __ Eon(x13, x0, Operand(x1, SXTX, 3));
1025 ASSERT_EQUAL_64(0xeeeeeeeaeeeaeae6, x13);
1076 ASSERT_EQUAL_64(1, x13);
1133 __ Madd(x13, x16, x16, x17);
1163 ASSERT_EQUAL_64(1, x13);
1202 __ Msub(x13, x16, x16, x17);
1232 ASSERT_EQUAL_64(1, x13);
1310 __ Umaddl(x13, w17, w18, x20);
1322 ASSERT_EQUAL_64(0x0000000100000003, x13);
1345 __ Umsubl(x13, w17, w18, x20);
1357 ASSERT_EQUAL_64(0xffffffff00000005, x13);
1392 __ Sdiv(x13, x19, x21);
1428 ASSERT_EQUAL_64(0x0000000040000000, x13);
2833 __ Add(x13, x2, Operand(1));
2856 ASSERT_EQUAL_64(0x0, x13);
2901 ASSERT_EQUAL_64(0x0, x13);
2923 __ Add(x13, x0, Operand(x1, ASR, 8));
2944 ASSERT_EQUAL_64(0x000123456789abcd, x13);
2975 __ Add(x13, x0, Operand(x1, UXTW, 4));
3005 ASSERT_EQUAL_64(0x89abcdef0, x13);
3047 __ Sub(x13, x0, -600);
3064 ASSERT_EQUAL_64(600, x13);
3177 ASSERT_EQUAL_64(0x65432110, x13);
3395 __ Adc(x13, x1, Operand(x2, UXTX, 4));
3419 ASSERT_EQUAL_64(0x123456789abcdef1, x13);
3531 __ Ngc(x13, Operand(x0));
3542 ASSERT_EQUAL_64(-1, x13);
3935 ASSERT_EQUAL_64(0x0000000f, x13);
3967 __ Csel(x13, x20, Operand(x20, ASR, 63), ne);
3989 ASSERT_EQUAL_64(-1, x13);
4218 __ Mov(x13, 0x8888888888888888);
4230 __ Bfxil(x13, x1, 16, 8);
4243 ASSERT_EQUAL_64(0x88888888888888ab, x13);
4259 __ sbfm(x13, x1, 48, 35);
4287 ASSERT_EQUAL_64(0x000789abcdef0000, x13);
4324 __ ubfm(x13, x1, 48, 35);
4347 ASSERT_EQUAL_64(0x000789abcdef0000, x13);
4390 ASSERT_EQUAL_64(0x89abcdef, x13);
5345 __ Mrs(x13, NZCV);
6108 ASSERT_EQUAL_64(0x80000000, x13);
6208 ASSERT_EQUAL_64(0, x13);
6311 ASSERT_EQUAL_64(0x80000000, x13);
6414 ASSERT_EQUAL_64(0, x13);
6517 ASSERT_EQUAL_64(0x80000000, x13);
6617 ASSERT_EQUAL_64(0, x13);
6720 ASSERT_EQUAL_64(0x80000000, x13);
6822 ASSERT_EQUAL_64(0, x13);
7477 x12.Bit() | x13.Bit();
7531 ASSERT_EQUAL_64((literal_base * 4) & 0xffffffff, x13);