Lines Matching refs: out0

252 #define LD_B2(RTYPE, psrc, stride, out0, out1)  \
254 out0 = LD_B(RTYPE, (psrc)); \
258 #define LD_B4(RTYPE, psrc, stride, out0, out1, out2, out3) \
260 LD_B2(RTYPE, (psrc), stride, out0, out1); \
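The load macros above are multi-line; only their first body statements match out0. A minimal sketch of the full pair/quad load helpers, assuming a simple LD_B dereference helper and <msa.h> for the vector types (the out1/out2/out3 statements are assumed completions; the later sketches in this listing assume the same header):

    #include <msa.h>

    /* Assumed single-vector load helper. */
    #define LD_B(RTYPE, psrc) (*((RTYPE *) (psrc)))

    /* Load two vectors located 'stride' bytes apart. */
    #define LD_B2(RTYPE, psrc, stride, out0, out1)  \
    {                                               \
        out0 = LD_B(RTYPE, (psrc));                 \
        out1 = LD_B(RTYPE, (psrc) + stride);        \
    }

    /* Load four vectors by chaining two LD_B2 invocations. */
    #define LD_B4(RTYPE, psrc, stride, out0, out1, out2, out3)  \
    {                                                           \
        LD_B2(RTYPE, (psrc), stride, out0, out1);               \
        LD_B2(RTYPE, (psrc) + 2 * stride, stride, out2, out3);  \
    }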
280 #define ADD2(in0, in1, in2, in3, out0, out1) \
282 out0 = in0 + in1; \
286 out0, out1, out2) \
288 ADD2(in0, in1, in2, in3, out0, out1); \
292 out0, out1, out2, out3) \
294 ADD2(in0, in1, in2, in3, out0, out1); \
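Lines 280-294 are the add helpers; the matches show only their first statements and the parameter continuation lines. A sketch of the likely full set, with ADD3 and ADD4 built on ADD2 (the out2/out3 statements are assumptions):

    #define ADD2(in0, in1, in2, in3, out0, out1)  \
    {                                             \
        out0 = in0 + in1;                         \
        out1 = in2 + in3;                         \
    }

    #define ADD3(in0, in1, in2, in3, in4, in5,  \
                 out0, out1, out2)              \
    {                                           \
        ADD2(in0, in1, in2, in3, out0, out1);   \
        out2 = in4 + in5;                       \
    }

    #define ADD4(in0, in1, in2, in3, in4, in5, in6, in7,  \
                 out0, out1, out2, out3)                  \
    {                                                     \
        ADD2(in0, in1, in2, in3, out0, out1);             \
        ADD2(in4, in5, in6, in7, out2, out3);             \
    }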
298 #define ILVR_B2(RTYPE, in0, in1, in2, in3, out0, out1) \
300 out0 = (RTYPE) __msa_ilvr_b((v16i8) in0, (v16i8) in1); \
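ILVR_B2 interleaves the right (least significant) halves of each pair of byte vectors; the out1 statement below is an assumed completion:

    #define ILVR_B2(RTYPE, in0, in1, in2, in3, out0, out1)      \
    {                                                           \
        out0 = (RTYPE) __msa_ilvr_b((v16i8) in0, (v16i8) in1);  \
        out1 = (RTYPE) __msa_ilvr_b((v16i8) in2, (v16i8) in3);  \
    }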
305 #define HSUB_UB2(RTYPE, in0, in1, out0, out1) \
307 out0 = (RTYPE) __msa_hsub_u_h((v16u8) in0, (v16u8) in0); \
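In HSUB_UB2 the same operand is passed twice on purpose: __msa_hsub_u_h(a, b) subtracts the even-positioned unsigned bytes of b from the odd-positioned unsigned bytes of a, so (in0, in0) yields odd-minus-even differences within a single vector, widened to signed halfwords. The out1 statement is an assumed completion:

    #define HSUB_UB2(RTYPE, in0, in1, out0, out1)                 \
    {                                                             \
        out0 = (RTYPE) __msa_hsub_u_h((v16u8) in0, (v16u8) in0);  \
        out1 = (RTYPE) __msa_hsub_u_h((v16u8) in1, (v16u8) in1);  \
    }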
312 #define SLDI_B2_0(RTYPE, in0, in1, out0, out1, slide_val) \
315 out0 = (RTYPE) __msa_sldi_b((v16i8) zero_m, (v16i8) in0, slide_val); \
320 #define SLDI_B3_0(RTYPE, in0, in1, in2, out0, out1, out2, slide_val) \
323 SLDI_B2_0(RTYPE, in0, in1, out0, out1, slide_val); \
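The SLDI_B*_0 macros shift the byte elements of each input by slide_val positions, filling the vacated lanes with zeros from a local zero vector; the zero_m declarations and the out1/out2 statements are assumptions based on the first lines shown:

    #define SLDI_B2_0(RTYPE, in0, in1, out0, out1, slide_val)                 \
    {                                                                         \
        v16i8 zero_m = { 0 };                                                 \
        out0 = (RTYPE) __msa_sldi_b((v16i8) zero_m, (v16i8) in0, slide_val);  \
        out1 = (RTYPE) __msa_sldi_b((v16i8) zero_m, (v16i8) in1, slide_val);  \
    }

    #define SLDI_B3_0(RTYPE, in0, in1, in2, out0, out1, out2, slide_val)      \
    {                                                                         \
        v16i8 zero_m = { 0 };                                                 \
        SLDI_B2_0(RTYPE, in0, in1, out0, out1, slide_val);                    \
        out2 = (RTYPE) __msa_sldi_b((v16i8) zero_m, (v16i8) in2, slide_val);  \
    }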
328 #define ILVEV_W2(RTYPE, in0, in1, in2, in3, out0, out1) \
330 out0 = (RTYPE) __msa_ilvev_w((v4i32) in1, (v4i32) in0); \
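ILVEV_W2 interleaves the even-indexed word elements of each input pair; the out1 statement is an assumed completion:

    #define ILVEV_W2(RTYPE, in0, in1, in2, in3, out0, out1)      \
    {                                                            \
        out0 = (RTYPE) __msa_ilvev_w((v4i32) in1, (v4i32) in0);  \
        out1 = (RTYPE) __msa_ilvev_w((v4i32) in3, (v4i32) in2);  \
    }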
335 #define ADD_ABS_H3(RTYPE, in0, in1, in2, out0, out1, out2) \
339 out0 = __msa_add_a_h((v8i16) zero, in0); \
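ADD_ABS_H3 adds each input to a zero vector with __msa_add_a_h, which sums absolute values and therefore yields the element-wise absolute value of each signed halfword vector. The zero declaration and the out1/out2 statements are assumptions:

    #define ADD_ABS_H3(RTYPE, in0, in1, in2, out0, out1, out2)  \
    {                                                           \
        v8i16 zero = { 0 };                                     \
                                                                \
        out0 = __msa_add_a_h((v8i16) zero, in0);                \
        out1 = __msa_add_a_h((v8i16) zero, in1);                \
        out2 = __msa_add_a_h((v8i16) zero, in2);                \
    }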
345 #define VSHF_B2(RTYPE, in0, in1, in2, in3, mask0, mask1, out0, out1) \
347 out0 = (RTYPE) __msa_vshf_b((v16i8) mask0, (v16i8) in1, (v16i8) in0); \
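VSHF_B2 selects bytes from each input pair according to a byte-index mask vector; the out1 statement is an assumed completion:

    #define VSHF_B2(RTYPE, in0, in1, in2, in3, mask0, mask1, out0, out1)       \
    {                                                                          \
        out0 = (RTYPE) __msa_vshf_b((v16i8) mask0, (v16i8) in1, (v16i8) in0);  \
        out1 = (RTYPE) __msa_vshf_b((v16i8) mask1, (v16i8) in3, (v16i8) in2);  \
    }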
352 #define CMP_AND_SELECT(inp0, inp1, inp2, inp3, inp4, inp5, out0) \
363 out0 += inp4; \
503 int64_t out0;
531 out0 = __msa_copy_s_d((v2i64) dst0, 0);
534 SD(out0, nxt);
549 int32_t inp0, inp1, out0;
561 out0 = __msa_copy_s_w((v4i32) src1, 0);
562 SW(out0, nxt);
600 int64_t out0;
643 out0 = __msa_copy_s_d((v2i64) dst0, 0);
646 SD(out0, nxt);
731 int64_t out0;
795 out0 = __msa_copy_s_d((v2i64) dst0, 0);
798 SD(out0, nxt);
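The remaining matches (lines 503 through 798) all follow the same store epilogue: copy lane 0 of a result vector into a scalar with __msa_copy_s_d or __msa_copy_s_w, then write it out through the SD or SW helper. A minimal self-contained sketch of that pattern; the memcpy-based SD/SW definitions and the function and pointer names are placeholders, not taken from the file:

    #include <msa.h>
    #include <stdint.h>
    #include <string.h>

    /* Placeholder store helpers; the real SD/SW macros may use
     * unaligned-safe stores or inline asm instead of memcpy. */
    #define SD(val, pdst) memcpy((pdst), &(val), 8)
    #define SW(val, pdst) memcpy((pdst), &(val), 4)

    static void store_low_lanes(v2i64 dst0, v4i32 src1,
                                uint8_t *dst_d, uint8_t *dst_w)
    {
        int64_t out0;
        int32_t out1;

        out0 = __msa_copy_s_d(dst0, 0);   /* lane 0 as a 64-bit scalar */
        SD(out0, dst_d);

        out1 = __msa_copy_s_w(src1, 0);   /* lane 0 as a 32-bit scalar */
        SW(out1, dst_w);
    }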