Lines Matching defs:flat2

16     __m128i mask, hev, flat, flat2;
150 flat2 = _mm_max_epu8(
167 flat2 = _mm_max_epu8(work, flat2);
168 flat2 = _mm_max_epu8(flat2, _mm_srli_si128(flat2, 8));
169 flat2 = _mm_subs_epu8(flat2, one);
170 flat2 = _mm_cmpeq_epi8(flat2, zero);
171 flat2 = _mm_and_si128(flat2, flat); // flat2 & flat & mask
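The matches at 150-171 build flat2 as a per-byte flatness mask: a running maximum of absolute pixel differences is folded across the register halves (line 168), thresholded at 1 (saturating subtract of one, then compare with zero), and finally narrowed by the flat mask computed earlier. A minimal sketch of that idiom, assuming SSE2 and with the helper names being hypothetical, not taken from the source:

    #include <emmintrin.h>  /* SSE2 */

    /* Byte-wise |a - b| via unsigned saturating subtraction. */
    static __m128i abs_diff_u8(__m128i a, __m128i b) {
      return _mm_or_si128(_mm_subs_epu8(a, b), _mm_subs_epu8(b, a));
    }

    /* 0xFF in every lane where max_diff <= 1, 0x00 elsewhere, then
     * restricted to lanes that already passed the narrower flat test. */
    static __m128i flat2_mask(__m128i max_diff, __m128i flat) {
      const __m128i one = _mm_set1_epi8(1);
      const __m128i zero = _mm_setzero_si128();
      __m128i m = _mm_subs_epu8(max_diff, one);  /* 0 iff max_diff <= 1   */
      m = _mm_cmpeq_epi8(m, zero);               /* 0xFF iff max_diff <= 1 */
      return _mm_and_si128(m, flat);             /* flat2 & flat & mask    */
    }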
337 flat2 = _mm_shuffle_epi32(flat2, 68);
351 q6p6 = _mm_andnot_si128(flat2, q6p6);
352 flat2_q6p6 = _mm_and_si128(flat2, flat2_q6p6);
357 q5p5 = _mm_andnot_si128(flat2, q5p5);
358 flat2_q5p5 = _mm_and_si128(flat2, flat2_q5p5);
363 q4p4 = _mm_andnot_si128(flat2, q4p4);
364 flat2_q4p4 = _mm_and_si128(flat2, flat2_q4p4);
369 q3p3 = _mm_andnot_si128(flat2, q3p3);
370 flat2_q3p3 = _mm_and_si128(flat2, flat2_q3p3);
375 q2p2 = _mm_andnot_si128(flat2, q2p2);
376 flat2_q2p2 = _mm_and_si128(flat2, flat2_q2p2);
381 q1p1 = _mm_andnot_si128(flat2, q1p1);
382 flat2_q1p1 = _mm_and_si128(flat2, flat2_q1p1);
387 q0p0 = _mm_andnot_si128(flat2, q0p0);
388 flat2_q0p0 = _mm_and_si128(flat2, flat2_q0p0);
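Each andnot/and pair at 351-388 is half of a byte-lane select: lanes where flat2 is set take the wide-filter output (flat2_q6p6 and so on), other lanes keep the existing value. The OR that completes the blend sits on the unmatched lines that follow each pair; the same pattern repeats for the separate p/q registers at 864-930. A sketch of the full select, assuming that follow-up _mm_or_si128 (helper name hypothetical):

    /* Byte-lane select: where mask is 0xFF take if_set, else keep if_clear. */
    static __m128i select_u8(__m128i mask, __m128i if_set, __m128i if_clear) {
      return _mm_or_si128(_mm_and_si128(mask, if_set),
                          _mm_andnot_si128(mask, if_clear));
    }

    /* e.g. the pair at 351-352 plus its (unmatched) OR behaves like:
     *   q6p6 = select_u8(flat2, flat2_q6p6, q6p6);
     */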
398 __m128i mask, hev, flat, flat2;
539 flat2 = _mm_max_epu8(
543 flat2 = _mm_max_epu8(work, flat2);
550 flat2 = _mm_max_epu8(work, flat2);
558 flat2 = _mm_max_epu8(work, flat2);
559 flat2 = _mm_subs_epu8(flat2, one);
560 flat2 = _mm_cmpeq_epi8(flat2, zero);
561 flat2 = _mm_and_si128(flat2, flat); // flat2 & flat & mask
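The 16-pixel variant at 539-561 takes the same route but folds several more abs-diff terms (the work values at 543, 550, 558) into flat2 with repeated _mm_max_epu8 before the final threshold. Using the hypothetical helpers sketched above, the shape is roughly as follows; the exact taps feeding each term are not visible in the matched lines:

    /* Sketch only: each term compares an outer tap against p0/q0. */
    __m128i work;
    flat2 = _mm_max_epu8(abs_diff_u8(p4, p0), abs_diff_u8(q4, q0));
    work  = _mm_max_epu8(abs_diff_u8(p5, p0), abs_diff_u8(q5, q0));
    flat2 = _mm_max_epu8(work, flat2);
    /* ... further work/max rounds corresponding to lines 550 and 558 ... */
    flat2 = flat2_mask(flat2, flat);  /* lines 559-561: threshold + combine */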
864 p6 = _mm_andnot_si128(flat2, p6);
865 flat2_p6 = _mm_and_si128(flat2, flat2_p6);
869 p5 = _mm_andnot_si128(flat2, p5);
870 flat2_p5 = _mm_and_si128(flat2, flat2_p5);
874 p4 = _mm_andnot_si128(flat2, p4);
875 flat2_p4 = _mm_and_si128(flat2, flat2_p4);
879 p3 = _mm_andnot_si128(flat2, p3);
880 flat2_p3 = _mm_and_si128(flat2, flat2_p3);
884 p2 = _mm_andnot_si128(flat2, p2);
885 flat2_p2 = _mm_and_si128(flat2, flat2_p2);
889 p1 = _mm_andnot_si128(flat2, p1);
890 flat2_p1 = _mm_and_si128(flat2, flat2_p1);
894 p0 = _mm_andnot_si128(flat2, p0);
895 flat2_p0 = _mm_and_si128(flat2, flat2_p0);
899 q0 = _mm_andnot_si128(flat2, q0);
900 flat2_q0 = _mm_and_si128(flat2, flat2_q0);
904 q1 = _mm_andnot_si128(flat2, q1);
905 flat2_q1 = _mm_and_si128(flat2, flat2_q1);
909 q2 = _mm_andnot_si128(flat2, q2);
910 flat2_q2 = _mm_and_si128(flat2, flat2_q2);
914 q3 = _mm_andnot_si128(flat2, q3);
915 flat2_q3 = _mm_and_si128(flat2, flat2_q3);
919 q4 = _mm_andnot_si128(flat2, q4);
920 flat2_q4 = _mm_and_si128(flat2, flat2_q4);
924 q5 = _mm_andnot_si128(flat2, q5);
925 flat2_q5 = _mm_and_si128(flat2, flat2_q5);
929 q6 = _mm_andnot_si128(flat2, q6);
930 flat2_q6 = _mm_and_si128(flat2, flat2_q6);