Lines Matching refs:uv_stride

128     int uv_stride,
158 usad = (vp8_variance8x8(u, uv_stride, ud, uvd_stride, &sse));
160 vsad = (vp8_variance8x8(v, uv_stride, vd, uvd_stride, &sse));
164 usad = (vp8_sad8x8(u, uv_stride, ud, uvd_stride, UINT_MAX) + 32) >> 6;
165 vsad = (vp8_sad8x8(v, uv_stride, vd, uvd_stride, UINT_MAX) + 32) >> 6;
175 usad = (vp8_variance4x4(u, uv_stride, ud, uvd_stride, &sse));
177 vsad = (vp8_variance4x4(v, uv_stride, vd, uvd_stride, &sse));
181 usad = (vp8_sad4x4(u, uv_stride, ud, uvd_stride, UINT_MAX) + 8) >> 4;
182 vsad = (vp8_sad4x4(v, uv_stride, vd, uvd_stride, UINT_MAX) + 8) >> 4;
217 u, v, uv_stride,
227 vp8_copy_mem8x8(u, uv_stride, ud, uvd_stride);
228 vp8_copy_mem8x8(v, uv_stride, vd, uvd_stride);
233 for (up = u, udp = ud, i = 0; i < uvblksize; ++i, up += uv_stride, udp += uvd_stride)
235 for (vp = v, vdp = vd, i = 0; i < uvblksize; ++i, vp += uv_stride, vdp += uvd_stride)
321 u_ptr + 4*(i*show->uv_stride+j),
322 v_ptr + 4*(i*show->uv_stride+j),
324 show->uv_stride,
326 ud_ptr + 4*(i*dest->uv_stride+j),
327 vd_ptr + 4*(i*dest->uv_stride+j),
329 dest->uv_stride);
335 unsigned char *up = u_ptr + 4*(i*show->uv_stride+j);
336 unsigned char *udp = ud_ptr + 4*(i*dest->uv_stride+j);
337 unsigned char *vp = v_ptr + 4*(i*show->uv_stride+j);
338 unsigned char *vdp = vd_ptr + 4*(i*dest->uv_stride+j);
341 for (k = 0; k < 4; ++k, up += show->uv_stride, udp += dest->uv_stride,
342 vp += show->uv_stride, vdp += dest->uv_stride)
355 show->uv_stride,
358 dest->uv_stride);
364 vp8_copy_mem8x8(u_ptr, show->uv_stride, ud_ptr, dest->uv_stride);
365 vp8_copy_mem8x8(v_ptr, show->uv_stride, vd_ptr, dest->uv_stride);
377 u_ptr += show->uv_stride * 8 - 8 * cm->mb_cols;
378 v_ptr += show->uv_stride * 8 - 8 * cm->mb_cols;
380 ud_ptr += dest->uv_stride * 8 - 8 * cm->mb_cols;
381 vd_ptr += dest->uv_stride * 8 - 8 * cm->mb_cols;