Lines Matching refs:val

145 posSamp.val[0] = vrev64q_s16(posSamp.val[0]);
146 posSamp.val[1] = vrev64q_s16(posSamp.val[1]);
149 accum = vmlal_s16(accum, vget_low_s16(posSamp.val[0]), vget_high_s16(posCoef)); // reversed
150 accum = vmlal_s16(accum, vget_high_s16(posSamp.val[0]), vget_low_s16(posCoef)); // reversed
151 accum2 = vmlal_s16(accum2, vget_low_s16(posSamp.val[1]), vget_high_s16(posCoef)); // reversed
152 accum2 = vmlal_s16(accum2, vget_high_s16(posSamp.val[1]), vget_low_s16(posCoef)); // reversed
153 accum = vmlal_s16(accum, vget_low_s16(negSamp.val[0]), vget_low_s16(negCoef));
154 accum = vmlal_s16(accum, vget_high_s16(negSamp.val[0]), vget_high_s16(negCoef));
155 accum2 = vmlal_s16(accum2, vget_low_s16(negSamp.val[1]), vget_low_s16(negCoef));
156 accum2 = vmlal_s16(accum2, vget_high_s16(negSamp.val[1]), vget_high_s16(negCoef));
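The block above (lines 145-156) is the reverse-and-accumulate idiom for the 16-bit stereo path: vrev64q_s16 mirrors the 16-bit lanes within each 64-bit half, and feeding the low sample half to the high coefficient half (and vice versa) in the vmlal_s16 calls completes the full 8-lane reversal, while vmlal_s16 widens each s16 x s16 product into the 32-bit accumulators. A minimal self-contained sketch of the same idiom; the names mac8_reversed, samples, and coefs are illustrative, not from the listing:

#include <arm_neon.h>

/* Sketch: accum += reverse(samples) . coefs over 8 taps.
 * vrev64q_s16 turns [a b c d | e f g h] into [d c b a | h g f e];
 * pairing the low sample half with the high coefficient half (and
 * vice versa) finishes the full reversal, as in lines 145-152. */
static inline int32x4_t mac8_reversed(int32x4_t accum,
        int16x8_t samples, int16x8_t coefs)
{
    samples = vrev64q_s16(samples);
    accum = vmlal_s16(accum, vget_low_s16(samples),  vget_high_s16(coefs));
    accum = vmlal_s16(accum, vget_high_s16(samples), vget_low_s16(coefs));
    return accum;
}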
222 posCoef.val[0] = vld1q_s32(coefsP);
224 posCoef.val[1] = vld1q_s32(coefsP);
227 negCoef.val[0] = vld1q_s32(coefsN);
229 negCoef.val[1] = vld1q_s32(coefsN);
240 posCoef1.val[0] = vld1q_s32(coefsP1);
242 posCoef1.val[1] = vld1q_s32(coefsP1);
245 negCoef1.val[0] = vld1q_s32(coefsN1);
247 negCoef1.val[1] = vld1q_s32(coefsN1);
251 posCoef1.val[0] = vsubq_s32(posCoef1.val[0], posCoef.val[0]);
252 posCoef1.val[1] = vsubq_s32(posCoef1.val[1], posCoef.val[1]);
253 negCoef.val[0] = vsubq_s32(negCoef.val[0], negCoef1.val[0]);
254 negCoef.val[1] = vsubq_s32(negCoef.val[1], negCoef1.val[1]);
256 posCoef1.val[0] = vqrdmulhq_lane_s32(posCoef1.val[0], interp, 0);
257 posCoef1.val[1] = vqrdmulhq_lane_s32(posCoef1.val[1], interp, 0);
258 negCoef.val[0] = vqrdmulhq_lane_s32(negCoef.val[0], interp, 0);
259 negCoef.val[1] = vqrdmulhq_lane_s32(negCoef.val[1], interp, 0);
261 posCoef.val[0] = vaddq_s32(posCoef.val[0], posCoef1.val[0]);
262 posCoef.val[1] = vaddq_s32(posCoef.val[1], posCoef1.val[1]);
263 negCoef.val[0] = vaddq_s32(negCoef.val[0], negCoef1.val[0]);
264 negCoef.val[1] = vaddq_s32(negCoef.val[1], negCoef1.val[1]);
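Lines 222-264 load two adjacent 32-bit coefficient phases and linearly interpolate between them in fixed point: the delta c1 - c0 is scaled by the fractional phase in lane 0 of interp via vqrdmulhq_lane_s32, a saturating rounding doubling high-half multiply, round((a * b * 2) >> 32), i.e. a Q31 x Q31 multiply, then added back. Note the asymmetry: the positive side forms c0 + (c1 - c0) * frac, while lines 253-263 form c1 + (c0 - c1) * frac for the negative side, presumably because the negative-time taps are indexed in the opposite direction, so the two bracketing phases swap roles. A sketch of the positive-side step, assuming interp carries a Q31 fraction; interp_coefs_q31 and the parameter names are illustrative:

#include <arm_neon.h>

/* Sketch: Q31 linear interpolation c0 + (c1 - c0) * frac, frac taken
 * from lane 0 of 'interp'. vqrdmulhq_lane_s32 computes
 * round((a * b * 2) >> 32), which is a Q31 x Q31 multiply. */
static inline int32x4_t interp_coefs_q31(int32x4_t c0, int32x4_t c1,
        int32x2_t interp)
{
    int32x4_t delta = vsubq_s32(c1, c0);          /* c1 - c0          */
    delta = vqrdmulhq_lane_s32(delta, interp, 0); /* (c1 - c0) * frac */
    return vaddq_s32(c0, delta);                  /* c0 + delta       */
}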
279 posSamp0 = vqrdmulhq_s32(posSamp0, posCoef.val[1]); // reversed
280 posSamp1 = vqrdmulhq_s32(posSamp1, posCoef.val[0]); // reversed
281 negSamp0 = vqrdmulhq_s32(negSamp0, negCoef.val[0]);
282 negSamp1 = vqrdmulhq_s32(negSamp1, negCoef.val[1]);
295 posSamp.val[0] = vrev64q_s16(posSamp.val[0]);
296 posSamp.val[1] = vrev64q_s16(posSamp.val[1]);
299 int32x4_t posSamp0 = vshll_n_s16(vget_low_s16(posSamp.val[0]), 15);
300 int32x4_t posSamp1 = vshll_n_s16(vget_high_s16(posSamp.val[0]), 15);
301 int32x4_t negSamp0 = vshll_n_s16(vget_low_s16(negSamp.val[0]), 15);
302 int32x4_t negSamp1 = vshll_n_s16(vget_high_s16(negSamp.val[0]), 15);
305 posSamp0 = vqrdmulhq_s32(posSamp0, posCoef.val[1]); // reversed
306 posSamp1 = vqrdmulhq_s32(posSamp1, posCoef.val[0]); // reversed
307 negSamp0 = vqrdmulhq_s32(negSamp0, negCoef.val[0]);
308 negSamp1 = vqrdmulhq_s32(negSamp1, negCoef.val[1]);
316 posSamp0 = vshll_n_s16(vget_low_s16(posSamp.val[1]), 15);
317 posSamp1 = vshll_n_s16(vget_high_s16(posSamp.val[1]), 15);
318 negSamp0 = vshll_n_s16(vget_low_s16(negSamp.val[1]), 15);
319 negSamp1 = vshll_n_s16(vget_high_s16(negSamp.val[1]), 15);
322 posSamp0 = vqrdmulhq_s32(posSamp0, posCoef.val[1]); // reversed
323 posSamp1 = vqrdmulhq_s32(posSamp1, posCoef.val[0]); // reversed
324 negSamp0 = vqrdmulhq_s32(negSamp0, negCoef.val[0]);
325 negSamp1 = vqrdmulhq_s32(negSamp1, negCoef.val[1]);
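Lines 295-325 run 16-bit samples against the interpolated 32-bit coefficients: after the same lane reversal, each int16x4 half is widened with vshll_n_s16(x, 15), so the following vqrdmulhq_s32 yields round(((s << 15) * c * 2) >> 32) == round((s * c) >> 16) per lane, a full-precision multiply by a Q31 coefficient with no overflow; the reversed sample halves again pair with the coefficient vectors in swapped order (posCoef.val[1] before posCoef.val[0]). A sketch of the widen-and-multiply step under those assumptions, with illustrative names:

#include <arm_neon.h>

/* Sketch: widen int16 samples by << 15, then multiply by Q31 coefs.
 * Per lane: round((((int64_t)s << 15) * c * 2) >> 32)
 *        == round(((int64_t)s * c) >> 16). */
static inline void widen_mul_q31(int16x8_t samp,
        int32x4_t coefLo, int32x4_t coefHi,
        int32x4_t *prodLo, int32x4_t *prodHi)
{
    int32x4_t s0 = vshll_n_s16(vget_low_s16(samp), 15);
    int32x4_t s1 = vshll_n_s16(vget_high_s16(samp), 15);
    *prodLo = vqrdmulhq_s32(s0, coefLo);
    *prodHi = vqrdmulhq_s32(s1, coefHi);
}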
397 posCoef.val[0] = vld1q_f32(coefsP);
399 posCoef.val[1] = vld1q_f32(coefsP);
402 negCoef.val[0] = vld1q_f32(coefsN);
404 negCoef.val[1] = vld1q_f32(coefsN);
415 posCoef1.val[0] = vld1q_f32(coefsP1);
417 posCoef1.val[1] = vld1q_f32(coefsP1);
420 negCoef1.val[0] = vld1q_f32(coefsN1);
422 negCoef1.val[1] = vld1q_f32(coefsN1);
425 posCoef1.val[0] = vsubq_f32(posCoef1.val[0], posCoef.val[0]);
426 posCoef1.val[1] = vsubq_f32(posCoef1.val[1], posCoef.val[1]);
427 negCoef.val[0] = vsubq_f32(negCoef.val[0], negCoef1.val[0]);
428 negCoef.val[1] = vsubq_f32(negCoef.val[1], negCoef1.val[1]);
430 posCoef.val[0] = vmlaq_lane_f32(posCoef.val[0], posCoef1.val[0], interp, 0);
431 posCoef.val[1] = vmlaq_lane_f32(posCoef.val[1], posCoef1.val[1], interp, 0);
432 negCoef.val[0] = vmlaq_lane_f32(negCoef1.val[0], negCoef.val[0], interp, 0); // reversed
433 negCoef.val[1] = vmlaq_lane_f32(negCoef1.val[1], negCoef.val[1], interp, 0); // reversed
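Lines 397-433 are the float counterpart of the coefficient interpolation: vmlaq_lane_f32 folds the scale-and-add into one multiply-accumulate, c0 + (c1 - c0) * frac, and the negative side again interpolates in the mirrored direction, c1 + (c0 - c1) * frac (the "// reversed" comments). A minimal sketch of the positive-side step; interp_coefs_f32 and the parameter names are illustrative:

#include <arm_neon.h>

/* Sketch: float linear interpolation c0 + (c1 - c0) * frac, with frac
 * broadcast from lane 0 of 'interp' by vmlaq_lane_f32. */
static inline float32x4_t interp_coefs_f32(float32x4_t c0, float32x4_t c1,
        float32x2_t interp)
{
    float32x4_t delta = vsubq_f32(c1, c0);       /* c1 - c0           */
    return vmlaq_lane_f32(c0, delta, interp, 0); /* c0 + delta * frac */
}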
444 posSamp.val[0] = vld1q_f32(sP);
446 posSamp.val[1] = vld1q_f32(sP);
449 negSamp.val[0] = vld1q_f32(sN);
451 negSamp.val[1] = vld1q_f32(sN);
455 posSamp.val[0] = vrev64q_f32(posSamp.val[0]);
456 posSamp.val[1] = vrev64q_f32(posSamp.val[1]);
457 posSamp.val[0] = vcombine_f32(
458 vget_high_f32(posSamp.val[0]), vget_low_f32(posSamp.val[0]));
459 posSamp.val[1] = vcombine_f32(
460 vget_high_f32(posSamp.val[1]), vget_low_f32(posSamp.val[1]));
462 accum = vmlaq_f32(accum, posSamp.val[0], posCoef.val[1]);
463 accum = vmlaq_f32(accum, posSamp.val[1], posCoef.val[0]);
464 accum = vmlaq_f32(accum, negSamp.val[0], negCoef.val[0]);
465 accum = vmlaq_f32(accum, negSamp.val[1], negCoef.val[1]);
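NEON's vrev64q only mirrors lanes within each 64-bit half, so the float path (lines 455-460, and again at 472-488) reverses all four 32-bit lanes in two steps: vrev64q_f32 followed by a vcombine_f32 that exchanges the halves. A sketch of that helper; the name reverse_f32x4 is illustrative:

#include <arm_neon.h>

/* Sketch: full 4-lane reversal, [a b c d] -> [d c b a]. */
static inline float32x4_t reverse_f32x4(float32x4_t v)
{
    v = vrev64q_f32(v); /* [b a d c] */
    return vcombine_f32(vget_high_f32(v), vget_low_f32(v));
}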
472 posSamp0.val[0] = vrev64q_f32(posSamp0.val[0]);
473 posSamp0.val[1] = vrev64q_f32(posSamp0.val[1]);
474 posSamp0.val[0] = vcombine_f32(
475 vget_high_f32(posSamp0.val[0]), vget_low_f32(posSamp0.val[0]));
476 posSamp0.val[1] = vcombine_f32(
477 vget_high_f32(posSamp0.val[1]), vget_low_f32(posSamp0.val[1]));
483 posSamp1.val[0] = vrev64q_f32(posSamp1.val[0]);
484 posSamp1.val[1] = vrev64q_f32(posSamp1.val[1]);
485 posSamp1.val[0] = vcombine_f32(
486 vget_high_f32(posSamp1.val[0]), vget_low_f32(posSamp1.val[0]));
487 posSamp1.val[1] = vcombine_f32(
488 vget_high_f32(posSamp1.val[1]), vget_low_f32(posSamp1.val[1]));
494 accum = vmlaq_f32(accum, negSamp0.val[0], negCoef.val[0]);
495 accum = vmlaq_f32(accum, negSamp1.val[0], negCoef.val[1]);
496 accum2 = vmlaq_f32(accum2, negSamp0.val[1], negCoef.val[0]);
497 accum2 = vmlaq_f32(accum2, negSamp1.val[1], negCoef.val[1]);
499 accum = vmlaq_f32(accum, posSamp0.val[0], posCoef.val[1]); // reversed
500 accum = vmlaq_f32(accum, posSamp1.val[0], posCoef.val[0]); // reversed
501 accum2 = vmlaq_f32(accum2, posSamp0.val[1], posCoef.val[1]); // reversed
502 accum2 = vmlaq_f32(accum2, posSamp1.val[1], posCoef.val[0]); // reversed
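The closing accumulation (lines 494-502) follows the same pattern as the fixed-point paths: negative-side samples multiply their coefficients in natural order, the reversed positive-side samples pair with the coefficient vectors in swapped order (posCoef.val[1] first, then posCoef.val[0]), and accum/accum2 keep per-channel sums for the two de-interleaved channels held in val[0] and val[1]. Reversing the loaded sample vector rather than loading backwards appears to be the cheaper choice here, since NEON has no reversed-order vector load.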