Searched refs:U8 (Results 1 - 25 of 39) sorted by last modified time

/external/valgrind/main/VEX/priv/
host_amd64_isel.c
98 && e->Iex.Const.con->Ico.U8 == 0;
1067 nshift = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
1971 && imm8->Iex.Const.con->Ico.U8 < 4
1976 UInt shift = imm8->Iex.Const.con->Ico.U8;
1992 UInt shift = e->Iex.Binop.arg2->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
2066 return AMD64RMI_Imm(0xFF & e->Iex.Const.con->Ico.U8); break;
2135 return AMD64RI_Imm(0xFF & e->Iex.Const.con->Ico.U8);
host_arm64_isel.c
1054 //ZZ case Ico_U8: u = 0xFF & (e->Iex.Const.con->Ico.U8); break;
1431 UInt u = e->Iex.Const.con->Ico.U8;
1872 //ZZ index = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
1897 //ZZ index = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
2344 case Ico_U8: u = e->Iex.Const.con->Ico.U8; break;
3232 //ZZ imm = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
3256 //ZZ imm = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
3280 //ZZ imm = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
3327 //ZZ Int nshift = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
3661 //ZZ imm6 = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
[all...]
host_arm_isel.c
980 case Ico_U8: u = 0xFF & (e->Iex.Const.con->Ico.U8); break;
1034 case Ico_U8: u = 0xFF & (e->Iex.Const.con->Ico.U8); break;
1384 index = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
1409 index = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
1861 case Ico_U8: u = 0xFF & (e->Iex.Const.con->Ico.U8); break;
2687 imm = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
2711 imm = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
2735 imm = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
2782 Int nshift = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
3116 imm6 = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
[all...]
host_mips_isel.c
1823 l = (Long) (Int) (Char) con->Ico.U8;
1924 u = 0x000000FF & con->Ico.U8;
1977 && e->Iex.Const.con->Ico.U8 >= 1 && e->Iex.Const.con->Ico.U8 <= 31) {
1978 return MIPSRH_Imm(False /*unsigned */ , e->Iex.Const.con->Ico.U8);
2016 && e->Iex.Const.con->Ico.U8 >= 1 && e->Iex.Const.con->Ico.U8 <= 63)
2019 e->Iex.Const.con->Ico.U8);
host_ppc_isel.c
213 && e->Iex.Const.con->Ico.U8 == 0;
2348 case Ico_U8: l = (Long)(Int)(Char )con->Ico.U8; break;
2563 case Ico_U8: u = 0x000000FF & con->Ico.U8; break;
2620 case Ico_U8: l = (Long)(Int)(Char )con->Ico.U8; break;
2666 && e->Iex.Const.con->Ico.U8 >= 1
2667 && e->Iex.Const.con->Ico.U8 <= 31) {
2668 return PPCRH_Imm(False/*unsigned*/, e->Iex.Const.con->Ico.U8);
2711 && e->Iex.Const.con->Ico.U8 >= 1
2712 && e->Iex.Const.con->Ico.U8 <= 63) {
2713 return PPCRH_Imm(False/*unsigned*/, e->Iex.Const.con->Ico.U8);
[all...]
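
The MIPS and PPC matches above share one decision: an 8-bit constant shift amount is only emitted as an immediate right-hand operand when it lies in 1..31 (32-bit shifts) or 1..63 (64-bit shifts); anything else has to go through a register. A minimal standalone sketch of that check, with stand-in names rather than the real MIPSRH_Imm/PPCRH_Imm machinery:

    #include <stdio.h>

    typedef unsigned char UChar;

    /* Hypothetical helper mirroring the range tests above: can this Ico.U8
       shift amount be used directly as an immediate operand? */
    static int shiftAmountFitsImm(UChar u8, int shift_is_64bit)
    {
       UChar max = shift_is_64bit ? 63 : 31;
       return u8 >= 1 && u8 <= max;
    }

    int main(void)
    {
       printf("%d %d %d\n",
              shiftAmountFitsImm(17, 0),   /* 1: inside 1..31        */
              shiftAmountFitsImm(33, 0),   /* 0: too wide for 32-bit */
              shiftAmountFitsImm(33, 1));  /* 1: inside 1..63        */
       return 0;
    }
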
host_s390_isel.c
415 case Ico_U8: value = expr->Iex.Const.con->Ico.U8; break;
437 case Ico_U8: value = con->Ico.U8; return (ULong) ((value << 56) >> 56);
1846 case Ico_U8: value = con->Ico.U8; break;
1923 case Ico_U8: value = expr->Iex.Const.con->Ico.U8; break;
host_x86_isel.c
110 && e->Iex.Const.con->Ico.U8 == 0;
1023 nshift = e->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
1576 ->Iex.Binop.arg2->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
1593 UInt shift = e->Iex.Binop.arg2->Iex.Binop.arg2->Iex.Const.con->Ico.U8;
1655 case Ico_U8: u = 0xFF & (e->Iex.Const.con->Ico.U8); break;
1715 case Ico_U8: u = 0xFF & (e->Iex.Const.con->Ico.U8); break;
ir_defs.c
76 case Ico_U8: vex_printf( "0x%x:I8", (UInt)(con->Ico.U8)); break;
1595 c->Ico.U8 = u8;
2153 case Ico_U8: return IRConst_U8(c->Ico.U8);
4453 case Ico_U8: return toBool( c1->Ico.U8 == c2->Ico.U8 );
ir_opt.c
1091 case Ico_U8: return toBool( c1->Ico.U8 == c2->Ico.U8 );
1211 case Ico_U8: return toBool( e->Iex.Const.con->Ico.U8 == 0);
1224 case Ico_U8: return toBool( e->Iex.Const.con->Ico.U8 == 0xFF);
1396 /* signed */ Int s32 = e->Iex.Unop.arg->Iex.Const.con->Ico.U8;
1411 0xFFULL & e->Iex.Unop.arg->Iex.Const.con->Ico.U8));
1419 0xFF & e->Iex.Unop.arg->Iex.Const.con->Ico.U8));
1422 /* signed */ Short s16 = e->Iex.Unop.arg->Iex.Const.con->Ico.U8;
1430 0xFF & e->Iex.Unop.arg->Iex.Const.con->Ico.U8));
1473 ~ (e->Iex.Unop.arg->Iex.Const.con->Ico.U8))));
[all...]
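
The ir_opt.c matches above are the arithmetic core of U8 constant folding: equality tests against 0 and 0xFF, zero-extension by masking with 0xFF, sign-extension by passing through a narrower signed type, and Not8 as complement-then-mask. A hedged standalone sketch of those identities (the names echo the IR ops, but the real optimiser works on IRExpr trees):

    #include <stdint.h>
    #include <stdio.h>

    static int      isZeroU8   (uint8_t u8) { return u8 == 0; }
    static int      isOnesU8   (uint8_t u8) { return u8 == 0xFF; }
    static uint64_t fold_8Uto64(uint8_t u8) { return 0xFFULL & u8; }          /* zero-extend */
    static uint32_t fold_8Uto32(uint8_t u8) { return 0xFF & u8; }             /* zero-extend */
    static int32_t  fold_8Sto32(uint8_t u8) { return (int32_t)(int8_t)u8; }   /* sign-extend */
    static uint8_t  fold_Not8  (uint8_t u8) { return 0xFF & ~u8; }            /* complement, re-mask */

    int main(void)
    {
       uint8_t c = 0x80;
       printf("isZero=%d isOnes=%d\n", isZeroU8(c), isOnesU8(c));   /* 0 0  */
       printf("8Uto32(0x80) = %u\n",   fold_8Uto32(c));             /* 128  */
       printf("8Sto32(0x80) = %d\n",   fold_8Sto32(c));             /* -128 */
       printf("Not8(0x80)   = 0x%x\n", fold_Not8(c));               /* 0x7f */
       return 0;
    }
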
/external/valgrind/main/VEX/pub/
libvex_ir.h
295 UChar U8; member in union:_IRConst::__anon32279
1028 * by the U8 operand. Digits shifted out of the leftmost digit are
1033 * D64 x U8 -> D64 left shift and right shift respectively */
1036 /* D128 x U8 -> D128 left shift and right shift respectively */
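
The libvex_ir.h match is the declaration behind every .Ico.U8 access in this listing: IRConst is a tag-plus-union value and U8 is its UChar member; the same header also uses a U8 operand as the shift count for the decimal D64/D128 shift ops quoted above. An abbreviated sketch of that shape (the real declaration has more tags, U1 is a Bool, and the float/decimal members are omitted here):

    #include <stdio.h>

    typedef unsigned char      UChar;
    typedef unsigned short     UShort;
    typedef unsigned int       UInt;
    typedef unsigned long long ULong;

    /* Abbreviated stand-in for VEX's IRConst; see libvex_ir.h for the real thing. */
    typedef enum { Ico_U1, Ico_U8, Ico_U16, Ico_U32, Ico_U64 } IRConstTag;

    typedef struct _IRConst {
       IRConstTag tag;      /* says which member of Ico is valid */
       union {
          UChar  U1;        /* Bool in the real header */
          UChar  U8;        /* the member all the .Ico.U8 accesses above read */
          UShort U16;
          UInt   U32;
          ULong  U64;
       } Ico;
    } IRConst;

    int main(void)
    {
       IRConst c = { .tag = Ico_U8, .Ico = { .U8 = 0x2A } };
       if (c.tag == Ico_U8)
          printf("0x%x:I8\n", (UInt)c.Ico.U8);   /* the format ir_defs.c prints above */
       return 0;
    }
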
/external/valgrind/main/VEX/
test_main.c
2535 case Ico_U8: n = (ULong)con->Ico.U8; break;
/external/valgrind/main/memcheck/
mc_translate.c
5832 case Ico_U8: n = (ULong)con->Ico.U8; break;
/external/valgrind/main/memcheck/tests/common/
sh-mem-vec128.tmpl.c
25 typedef unsigned long long U8; typedef
/external/valgrind/main/memcheck/tests/
sh-mem-random.c
16 typedef unsigned long long U8; typedef
37 U8 build(int size, U1 byte)
40 U8 mask = 0;
41 U8 shres;
42 U8 res = 0xffffffffffffffffULL, res2;
48 mask |= (U8)byte;
75 U8 sum = 0;
85 sum += (U8)arr_i;
153 case 3: { // U8
156 *(U8*)(ar
[all...]
sh-mem.c
15 typedef unsigned long long U8; typedef
24 U8 a [SZB_OF_a / 8]; // Type is U8 to ensure it's 8-aligned
25 U8 b [SZB_OF_a / 8]; // same size as a[]
38 U8 build(int size, U1 byte)
41 U8 mask = 0;
42 U8 shres;
43 U8 res = 0xffffffffffffffffULL, res2;
49 mask |= (U8)byte;
109 assert(8 == sizeof(U8));
[all...]
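
The sh-mem tests define U8 as an 8-byte unsigned long long and use build(size, byte) to manufacture test values. A hedged sketch of the basic idea those snippets suggest, replicating one byte across the low size bytes of a U8; the real build() deliberately routes the value through partially-defined intermediates so Memcheck's shadow memory is exercised, and none of that is reproduced here:

    #include <assert.h>
    #include <stdio.h>

    typedef unsigned char      U1;
    typedef unsigned long long U8;   /* 8 bytes, as asserted in sh-mem.c */

    /* Simplified stand-in for build(): replicate `byte` across the low
       `size` bytes of a U8. */
    static U8 build_simple(int size, U1 byte)
    {
       U8 res = 0;
       int i;
       assert(size == 1 || size == 2 || size == 4 || size == 8);
       for (i = 0; i < size; i++)
          res = (res << 8) | (U8)byte;
       return res;
    }

    int main(void)
    {
       assert(8 == sizeof(U8));                      /* same sanity check as sh-mem.c */
       printf("%016llx\n", build_simple(4, 0xAB));   /* prints 00000000abababab */
       return 0;
    }
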
sh-mem.stderr.exp
16 -- NNN: 8 U8 U8 ------------------------
25 -- NNN: 8 F8 U8 ------------------------
/external/skia/include/xml/
SkBML_WXMLParser.h
42 // important that these are U8, so we get automatic wrap-around
43 U8 fNextElem, fNextAttrName, fNextAttrValue;
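
The Skia comment depends on the defined modular behaviour of unsigned 8-bit counters: incrementing past 255 wraps to 0 instead of overflowing. A tiny illustration (the variable name is borrowed from the header, everything else is mine):

    #include <stdio.h>

    int main(void)
    {
       unsigned char fNextElem = 255;   /* U8-style counter */
       fNextElem++;                     /* unsigned arithmetic is modulo 2^8 ... */
       printf("%u\n", fNextElem);       /* ... so this prints 0, with no undefined behaviour */
       return 0;
    }
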
/external/robolectric/lib/main/
android.jar
META-INF/ META-INF/MANIFEST.MF com/ com/android/ com/android/internal/ com/android/internal/util/ ...
/external/llvm/unittests/ADT/
StringRefTest.cpp
414 uint8_t U8; local
420 bool U8Success = StringRef(Unsigned[i].Str).getAsInteger(0, U8);
423 EXPECT_EQ(U8, Unsigned[i].Expected);
/external/libhevc/common/arm/
ihevc_sao_band_offset_chroma.s
157 VCLE.U8 D13,D4,D30 @vcle_u8(band_table.val[3], vdup_n_u8(16))
160 VORR.U8 D4,D4,D13 @band_table.val[3] = vorr_u8(band_table.val[3], au1_cmp)
166 VCLE.U8 D14,D3,D30 @vcle_u8(band_table.val[2], vdup_n_u8(16))
168 VORR.U8 D3,D3,D14 @band_table.val[2] = vorr_u8(band_table.val[2], au1_cmp)
170 VAND.U8 D4,D4,D13 @band_table.val[3] = vand_u8(band_table.val[3], au1_cmp)
176 VCLE.U8 D15,D2,D30 @vcle_u8(band_table.val[1], vdup_n_u8(16))
178 VORR.U8 D2,D2,D15 @band_table.val[1] = vorr_u8(band_table.val[1], au1_cmp)
180 VAND.U8 D3,D3,D14 @band_table.val[2] = vand_u8(band_table.val[2], au1_cmp)
186 VCLE.U8 D16,D1,D30 @vcle_u8(band_table.val[0], vdup_n_u8(16))
187 VORR.U8 D
[all...]
ihevc_sao_band_offset_luma.s
143 VCLE.U8 D12,D4,D29 @vcle_u8(band_table.val[3], vdup_n_u8(16))
146 VORR.U8 D4,D4,D12 @band_table.val[3] = vorr_u8(band_table.val[3], au1_cmp)
151 VCLE.U8 D11,D3,D29 @vcle_u8(band_table.val[2], vdup_n_u8(16))
154 VORR.U8 D3,D3,D11 @band_table.val[2] = vorr_u8(band_table.val[2], au1_cmp)
156 VAND.U8 D4,D4,D12 @band_table.val[3] = vand_u8(band_table.val[3], au1_cmp)
161 VCLE.U8 D10,D2,D29 @vcle_u8(band_table.val[1], vdup_n_u8(16))
164 VORR.U8 D2,D2,D10 @band_table.val[1] = vorr_u8(band_table.val[1], au1_cmp)
166 VAND.U8 D3,D3,D11 @band_table.val[2] = vand_u8(band_table.val[2], au1_cmp)
173 VCLE.U8 D9,D1,D29 @vcle_u8(band_table.val[0], vdup_n_u8(16))
174 VORR.U8 D
[all...]
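
Both band-offset kernels open with the u8 compare-and-mask idiom their comments name: vcle_u8 produces an all-ones lane mask where an entry is <= 16, and vorr_u8/vand_u8 then merge that mask into the band-table entry. A hedged illustration of the idiom on a single vector (ARM NEON, arm_neon.h); it is not the actual band-table construction, which interleaves four table registers:

    #include <arm_neon.h>

    /* For each lane: entries <= 16 become 0xFF, larger entries become 0x00.
       This is just the vcle/vorr/vand pattern from the comments above. */
    static inline uint8x8_t mask_small_entries(uint8x8_t entry)
    {
       uint8x8_t cmp = vcle_u8(entry, vdup_n_u8(16));   /* 0xFF where entry <= 16 */
       entry = vorr_u8(entry, cmp);                     /* small lanes saturate to 0xFF */
       return vand_u8(entry, cmp);                      /* large lanes clear to 0x00 */
    }
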
ihevc_sao_edge_offset_class0.s
157 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
162 VCLT.U8 Q9,Q6,Q7 @vcltq_u8(pu1_cur_row, pu1_cur_row_tmp)
174 VCGT.U8 Q15,Q13,Q14 @II vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
180 VCLT.U8 Q0,Q13,Q14 @II vcltq_u8(pu1_cur_row, pu1_cur_row_tmp)
185 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
188 VCLT.U8 Q9,Q6,Q7 @vcltq_u8(pu1_cur_row, pu1_cur_row_tmp)
199 VMOVL.U8 Q9,D12 @pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(pu1_cur_row)))
203 VCGT.U8 Q15,Q13,Q14 @II vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
205 VCLT.U8 Q0,Q13,Q14 @II vcltq_u8(pu1_cur_row, pu1_cur_row_tmp)
211 VMOVL.U8 Q
[all...]
ihevc_sao_edge_offset_class0_chroma.s
163 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
167 VCLT.U8 Q9,Q6,Q7 @vcltq_u8(pu1_cur_row, pu1_cur_row_tmp)
170 VSUB.U8 Q10,Q9,Q8 @sign_left = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
177 VCGT.U8 Q13,Q15,Q14 @II vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
180 VCLT.U8 Q12,Q15,Q14 @II vcltq_u8(pu1_cur_row, pu1_cur_row_tmp)
191 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
194 VCLT.U8 Q9,Q6,Q7 @vcltq_u8(pu1_cur_row, pu1_cur_row_tmp)
197 VSUB.U8 Q11,Q9,Q8 @sign_right = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
200 VADD.U8 Q7,Q1,Q10 @edge_idx = vaddq_s8(const_2, sign_left)
202 VADD.U8 Q
[all...]
ihevc_sao_edge_offset_class1.s
132 VCGT.U8 Q6,Q5,Q4 @vcgtq_u8(pu1_cur_row, pu1_top_row)
135 VCLT.U8 Q7,Q5,Q4 @vcltq_u8(pu1_cur_row, pu1_top_row)
137 VSUB.U8 Q8,Q7,Q6 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
147 VCGT.U8 Q6,Q5,Q9 @vcgtq_u8(pu1_cur_row, pu1_top_row)
152 VCLT.U8 Q7,Q5,Q9 @vcltq_u8(pu1_cur_row, pu1_top_row)
155 VSUB.U8 Q10,Q7,Q6 @sign_down = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
156 VMOVL.U8 Q13,D18 @II pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(pu1_cur_row)))
159 VMOVL.U8 Q14,D19 @II pi2_tmp_cur_row.val[1] = vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(pu1_cur_row)))
162 VCGT.U8 Q11,Q9,Q15 @II vcgtq_u8(pu1_cur_row, pu1_top_row)
166 VCLT.U8 Q1
[all...]
ihevc_sao_edge_offset_class1_chroma.s
137 VCGT.U8 Q6,Q5,Q14 @vcgtq_u8(pu1_cur_row, pu1_top_row)
140 VCLT.U8 Q7,Q5,Q14 @vcltq_u8(pu1_cur_row, pu1_top_row)
142 VSUB.U8 Q8,Q7,Q6 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
152 VCGT.U8 Q6,Q5,Q9 @vcgtq_u8(pu1_cur_row, pu1_top_row)
157 VCLT.U8 Q7,Q5,Q9 @vcltq_u8(pu1_cur_row, pu1_top_row)
160 VSUB.U8 Q10,Q7,Q6 @sign_down = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
161 VMOVL.U8 Q13,D18 @II pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(pu1_cur_row)))
164 VMOVL.U8 Q14,D19 @II pi2_tmp_cur_row.val[1] = vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(pu1_cur_row)))
167 VCGT.U8 Q11,Q9,Q15 @II vcgtq_u8(pu1_cur_row, pu1_top_row)
171 VCLT.U8 Q1
[all...]
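
All four edge-offset kernels start from the same U8 comparison idiom, and their comments give the intrinsic-level meaning: vcgtq_u8/vcltq_u8 yield all-ones or all-zero lane masks, their difference reinterpreted as s8 is a per-lane sign in {-1, 0, +1}, the two neighbour signs are added to a constant 2 to form edge_idx, and vmovl_u8 widens the pixels to 16 bits before the offsets are applied. A hedged C-intrinsics rendering of just that step (ARM NEON, arm_neon.h; class0 uses left/right neighbours, class1 top/bottom, but the sign arithmetic is the same, and the row/column bookkeeping is omitted):

    #include <arm_neon.h>

    /* Per-lane sign(cur - neigh) in {-1, 0, +1}: the compare masks are 0xFF/0x00,
       so cmp_lt - cmp_gt, reinterpreted as signed bytes, is exactly the sign. */
    static inline int8x16_t sao_sign(uint8x16_t cur, uint8x16_t neigh)
    {
       uint8x16_t cmp_gt = vcgtq_u8(cur, neigh);
       uint8x16_t cmp_lt = vcltq_u8(cur, neigh);
       return vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt));
    }

    /* edge_idx = 2 + sign_left + sign_right, per lane (values 0..4). */
    static inline int8x16_t sao_edge_idx(uint8x16_t cur, uint8x16_t left, uint8x16_t right)
    {
       int8x16_t edge_idx = vaddq_s8(vdupq_n_s8(2), sao_sign(cur, left));
       return vaddq_s8(edge_idx, sao_sign(cur, right));
    }

    /* Widen the 16 U8 pixels to two s16x8 halves before adding the SAO offsets,
       mirroring the VMOVL.U8 lines above. */
    static inline void sao_widen(uint8x16_t cur, int16x8_t* lo, int16x8_t* hi)
    {
       *lo = vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(cur)));
       *hi = vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(cur)));
    }
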
