Lines Matching refs:cm

45 static int is_compound_reference_allowed(const VP9_COMMON *cm) {
48 if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
54 static void setup_compound_reference_mode(VP9_COMMON *cm) {
55 if (cm->ref_frame_sign_bias[LAST_FRAME] ==
56 cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
57 cm->comp_fixed_ref = ALTREF_FRAME;
58 cm->comp_var_ref[0] = LAST_FRAME;
59 cm->comp_var_ref[1] = GOLDEN_FRAME;
60 } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
61 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
62 cm->comp_fixed_ref = GOLDEN_FRAME;
63 cm->comp_var_ref[0] = LAST_FRAME;
64 cm->comp_var_ref[1] = ALTREF_FRAME;
66 cm->comp_fixed_ref = LAST_FRAME;
67 cm->comp_var_ref[0] = GOLDEN_FRAME;
68 cm->comp_var_ref[1] = ALTREF_FRAME;
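
The fragments above come from is_compound_reference_allowed() and setup_compound_reference_mode(): compound prediction is allowed only when at least one reference has a sign bias different from LAST_FRAME's, and the reference whose bias differs from the other two becomes the fixed compound reference. A minimal standalone sketch of that selection (plain ints and local names stand in for the VP9_COMMON fields; this is not libvpx code):

/* Illustrative: how sign bias picks the fixed compound reference.
 * Indices follow VP9's reference numbering: LAST=1, GOLDEN=2, ALTREF=3. */
#include <stdio.h>

enum { LAST = 1, GOLDEN = 2, ALTREF = 3 };

static void pick_compound_refs(const int sign_bias[4],
                               int *fixed_ref, int var_ref[2]) {
  if (sign_bias[LAST] == sign_bias[GOLDEN]) {
    *fixed_ref = ALTREF;  var_ref[0] = LAST;   var_ref[1] = GOLDEN;
  } else if (sign_bias[LAST] == sign_bias[ALTREF]) {
    *fixed_ref = GOLDEN;  var_ref[0] = LAST;   var_ref[1] = ALTREF;
  } else {
    *fixed_ref = LAST;    var_ref[0] = GOLDEN; var_ref[1] = ALTREF;
  }
}

int main(void) {
  const int sign_bias[4] = { 0, 0, 0, 1 };  /* only ALTREF points the other way */
  int fixed_ref, var_ref[2];
  pick_compound_refs(sign_bias, &fixed_ref, var_ref);
  printf("fixed=%d var={%d,%d}\n", fixed_ref, var_ref[0], var_ref[1]);
  return 0;
}
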
118 static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
120 if (is_compound_reference_allowed(cm)) {
129 static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
130 FRAME_CONTEXT *const fc = &cm->fc;
133 if (cm->reference_mode == REFERENCE_MODE_SELECT)
137 if (cm->reference_mode != COMPOUND_REFERENCE)
143 if (cm->reference_mode != SINGLE_REFERENCE)
184 static void setup_plane_dequants(VP9_COMMON *cm, MACROBLOCKD *xd, int q_index) {
186 xd->plane[0].dequant = cm->y_dequant[q_index];
189 xd->plane[i].dequant = cm->uv_dequant[q_index];
240 VP9_COMMON *cm;
249 VP9_COMMON *const cm = args->cm;
266 const int eob = vp9_decode_block_tokens(cm, xd, plane, block,
275 VP9_COMMON *cm;
285 VP9_COMMON *const cm = args->cm;
290 eob = vp9_decode_block_tokens(cm, xd, plane, block, plane_bsize, x, y,
298 static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
303 const int x_mis = MIN(bw, cm->mi_cols - mi_col);
304 const int y_mis = MIN(bh, cm->mi_rows - mi_row);
305 const int offset = mi_row * cm->mi_stride + mi_col;
308 xd->mi = cm->mi + offset;
314 xd->mi[y * cm->mi_stride + x].src_mi = &xd->mi[0];
321 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
323 vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
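
In set_offsets() the decoder positions itself on the mode-info grid: offset = mi_row * cm->mi_stride + mi_col locates the block's top-left entry, while x_mis/y_mis clamp the block's footprint with MIN() so a block straddling the right or bottom frame edge never indexes past mi_cols/mi_rows. A small numeric illustration of that clamping (every value below is made up):

/* Illustrative only: clamp a block's mode-info footprint at the frame edge. */
#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main(void) {
  const int mi_cols = 106, mi_rows = 60;  /* frame size in 8x8 mode-info units */
  const int mi_stride = 114;              /* padded row stride of the MI array (arbitrary here) */
  const int bw = 8, bh = 8;               /* a 64x64 block is 8x8 MI units */
  const int mi_row = 56, mi_col = 104;    /* block near the bottom-right corner */
  const int x_mis = MIN(bw, mi_cols - mi_col);
  const int y_mis = MIN(bh, mi_rows - mi_row);
  const int offset = mi_row * mi_stride + mi_col;
  printf("offset=%d, visible footprint=%dx%d MI units\n", offset, x_mis, y_mis);
  return 0;
}
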
327 static void set_ref(VP9_COMMON *const cm, MACROBLOCKD *const xd,
330 RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME];
333 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
336 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
343 static void decode_block(VP9_COMMON *const cm, MACROBLOCKD *const xd,
348 MB_MODE_INFO *mbmi = set_offsets(cm, xd, tile, bsize, mi_row, mi_col);
349 vp9_read_mode_info(cm, xd, tile, mi_row, mi_col, r);
357 if (cm->seg.enabled)
358 setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
359 cm->base_qindex));
363 struct intra_args arg = { cm, xd, r };
368 set_ref(cm, xd, 0, mi_row, mi_col);
370 set_ref(cm, xd, 1, mi_row, mi_col);
378 struct inter_args arg = { cm, xd, r, &eobtotal };
388 static PARTITION_TYPE read_partition(VP9_COMMON *cm, MACROBLOCKD *xd, int hbs,
392 const vp9_prob *const probs = get_partition_probs(cm, ctx);
393 const int has_rows = (mi_row + hbs) < cm->mi_rows;
394 const int has_cols = (mi_col + hbs) < cm->mi_cols;
406 if (!cm->frame_parallel_decoding_mode)
407 ++cm->counts.partition[ctx][p];
412 static void decode_partition(VP9_COMMON *const cm, MACROBLOCKD *const xd,
420 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
423 partition = read_partition(cm, xd, hbs, mi_row, mi_col, bsize, r);
425 uv_subsize = ss_size_lookup[subsize][cm->subsampling_x][cm->subsampling_y];
427 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
430 decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
434 decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
437 decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
438 if (mi_row + hbs < cm->mi_rows)
439 decode_block(cm, xd, tile, mi_row + hbs, mi_col, r, subsize);
442 decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
443 if (mi_col + hbs < cm->mi_cols)
444 decode_block(cm, xd, tile, mi_row, mi_col + hbs, r, subsize);
447 decode_partition(cm, xd, tile, mi_row, mi_col, r, subsize);
448 decode_partition(cm, xd, tile, mi_row, mi_col + hbs, r, subsize);
449 decode_partition(cm, xd, tile, mi_row + hbs, mi_col, r, subsize);
450 decode_partition(cm, xd, tile, mi_row + hbs, mi_col + hbs, r, subsize);
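
decode_partition() is the quad-tree walk over a superblock: PARTITION_NONE decodes one block, HORZ/VERT decode two half-size blocks (the second is skipped when it would start outside the frame), and SPLIT recurses into four quadrants. A bare-bones sketch of that recursion with the bitstream reading stubbed out (read_partition_stub() is a hypothetical stand-in for the VP9 reader, and sizes are in pixels rather than MI units):

/* Illustrative sketch of the VP9 partition recursion; not libvpx code. */
#include <stdio.h>

typedef enum { PART_NONE, PART_HORZ, PART_VERT, PART_SPLIT } PARTITION;

/* Hypothetical stand-in for read_partition(): split everything down to 16x16. */
static PARTITION read_partition_stub(int size) {
  return size > 16 ? PART_SPLIT : PART_NONE;
}

static void decode_partition_sketch(int row, int col, int size,
                                    int rows, int cols) {
  const int half = size / 2;
  PARTITION p;
  if (row >= rows || col >= cols)
    return;  /* block starts outside the frame: nothing coded */
  p = read_partition_stub(size);
  switch (p) {
    case PART_NONE:
      printf("%dx%d block at (%d,%d)\n", size, size, col, row);
      break;
    case PART_HORZ:
      printf("%dx%d block at (%d,%d)\n", size, half, col, row);
      if (row + half < rows)
        printf("%dx%d block at (%d,%d)\n", size, half, col, row + half);
      break;
    case PART_VERT:
      printf("%dx%d block at (%d,%d)\n", half, size, col, row);
      if (col + half < cols)
        printf("%dx%d block at (%d,%d)\n", half, size, col + half, row);
      break;
    case PART_SPLIT:
      decode_partition_sketch(row, col, half, rows, cols);
      decode_partition_sketch(row, col + half, half, rows, cols);
      decode_partition_sketch(row + half, col, half, rows, cols);
      decode_partition_sketch(row + half, col + half, half, rows, cols);
      break;
  }
}

int main(void) {
  /* One 64x64 superblock in a frame 80 pixels wide and 48 tall: the bottom
   * 16 rows of the superblock fall outside the frame and are skipped. */
  decode_partition_sketch(0, 0, 64, 48, 80);
  return 0;
}
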
587 static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd,
591 cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
592 update |= read_delta_q(rb, &cm->y_dc_delta_q);
593 update |= read_delta_q(rb, &cm->uv_dc_delta_q);
594 update |= read_delta_q(rb, &cm->uv_ac_delta_q);
596 vp9_init_dequantizer(cm);
598 xd->lossless = cm->base_qindex == 0 &&
599 cm->y_dc_delta_q == 0 &&
600 cm->uv_dc_delta_q == 0 &&
601 cm->uv_ac_delta_q == 0;
621 static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
622 cm->display_width = cm->width;
623 cm->display_height = cm->height;
625 vp9_read_frame_size(rb, &cm->display_width, &cm->display_height);
628 static void resize_context_buffers(VP9_COMMON *cm, int width, int height) {
631 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
634 if (cm->width != width || cm->height != height) {
642 if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
643 if (vp9_alloc_context_buffers(cm, width, height))
644 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
647 vp9_set_mb_mi(cm, width, height);
649 vp9_init_context_buffers(cm);
650 cm->width = width;
651 cm->height = height;
655 static void setup_frame_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
658 resize_context_buffers(cm, width, height);
659 setup_display_size(cm, rb);
662 get_frame_new_buffer(cm), cm->width, cm->height,
663 cm->subsampling_x, cm->subsampling_y,
665 cm->use_highbitdepth,
668 &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb,
669 cm->cb_priv)) {
670 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
673 cm->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
684 static void setup_frame_size_with_refs(VP9_COMMON *cm,
691 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
695 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
707 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
713 RefBuffer *const ref_frame = &cm->frame_refs[i];
719 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
722 RefBuffer *const ref_frame = &cm->frame_refs[i];
727 cm->bit_depth,
728 cm->subsampling_x,
729 cm->subsampling_y))
730 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
734 resize_context_buffers(cm, width, height);
735 setup_display_size(cm, rb);
738 get_frame_new_buffer(cm), cm->width, cm->height,
739 cm->subsampling_x, cm->subsampling_y,
741 cm->use_highbitdepth,
744 &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb,
745 cm->cb_priv)) {
746 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
749 cm->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
752 static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
754 vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
758 cm->log2_tile_cols = min_log2_tile_cols;
760 cm->log2_tile_cols++;
762 if (cm->log2_tile_cols > 6)
763 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
767 cm->log2_tile_rows = vp9_rb_read_bit(rb);
768 if (cm->log2_tile_rows)
769 cm->log2_tile_rows += vp9_rb_read_bit(rb);
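
setup_tile_info() reads the tile layout as increments: tile columns are coded as a run of 1-bits added onto a minimum derived from mi_cols (values above 6 are rejected as a corrupt frame), and log2 of the tile-row count is 0, 1, or 2, coded with one or two further bits. A self-contained sketch of that bit-level scheme, with a hypothetical read_bit() standing in for vp9_rb_read_bit():

/* Illustrative sketch of VP9-style tile-count coding; not the libvpx parser. */
#include <stdio.h>

static int read_bit(const int *bits, int *pos) { return bits[(*pos)++]; }

int main(void) {
  /* Bit pattern: columns get "+1, +1, stop", rows get "1" then "+1". */
  const int bits[] = { 1, 1, 0, 1, 1 };
  int pos = 0;
  const int min_log2_tile_cols = 0, max_log2_tile_cols = 6;

  int log2_tile_cols = min_log2_tile_cols;
  while (log2_tile_cols < max_log2_tile_cols && read_bit(bits, &pos))
    log2_tile_cols++;

  int log2_tile_rows = read_bit(bits, &pos);
  if (log2_tile_rows)
    log2_tile_rows += read_bit(bits, &pos);

  printf("%d tile columns, %d tile rows\n",
         1 << log2_tile_cols, 1 << log2_tile_rows);
  return 0;
}
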
835 VP9_COMMON *const cm = &pbi->common;
837 const int aligned_cols = mi_cols_aligned_to_sb(cm->mi_cols);
838 const int tile_cols = 1 << cm->log2_tile_cols;
839 const int tile_rows = 1 << cm->log2_tile_rows;
845 if (cm->lf.filter_level && pbi->lf_worker.data1 == NULL) {
846 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
850 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
855 if (cm->lf.filter_level) {
859 lf_data->frame_buffer = get_frame_new_buffer(cm);
860 lf_data->cm = cm;
864 vp9_loop_filter_frame_init(cm, cm->lf.filter_level);
872 vpx_memset(cm->above_context, 0,
873 sizeof(*cm->above_context) * MAX_MB_PLANE * 2 * aligned_cols);
875 vpx_memset(cm->above_seg_context, 0,
876 sizeof(*cm->above_seg_context) * aligned_cols);
884 cm,
896 tile_data->cm = cm;
899 vp9_tile_init(&tile, tile_data->cm, tile_row, tile_col);
900 setup_token_decoder(buf->data, data_end, buf->size, &cm->error,
903 init_macroblockd(cm, &tile_data->xd);
910 vp9_tile_set_row(&tile, cm, tile_row);
917 vp9_tile_set_col(&tile, tile_data->cm, col);
922 decode_partition(tile_data->cm, &tile_data->xd, &tile, mi_row, mi_col,
928 if (cm->lf.filter_level && !pbi->mb.corrupted) {
936 if (mi_row + MI_BLOCK_SIZE >= cm->mi_rows) continue;
951 if (cm->lf.filter_level && !pbi->mb.corrupted) {
955 lf_data->stop = cm->mi_rows;
975 decode_partition(tile_data->cm, &tile_data->xd, tile,
998 VP9_COMMON *const cm = &pbi->common;
1001 const int aligned_mi_cols = mi_cols_aligned_to_sb(cm->mi_cols);
1002 const int tile_cols = 1 << cm->log2_tile_cols;
1003 const int tile_rows = 1 << cm->log2_tile_rows;
1020 CHECK_MEM_ERROR(cm, pbi->tile_workers,
1027 CHECK_MEM_ERROR(cm, worker->data1,
1029 CHECK_MEM_ERROR(cm, worker->data2, vpx_malloc(sizeof(TileInfo)));
1031 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
1045 vpx_memset(cm->above_context, 0,
1046 sizeof(*cm->above_context) * MAX_MB_PLANE * 2 * aligned_mi_cols);
1047 vpx_memset(cm->above_seg_context, 0,
1048 sizeof(*cm->above_seg_context) * aligned_mi_cols);
1082 tile_data->cm = cm;
1085 vp9_tile_init(tile, tile_data->cm, 0, buf->col);
1086 setup_token_decoder(buf->data, data_end, buf->size, &cm->error,
1089 init_macroblockd(cm, &tile_data->xd);
1122 VP9_COMMON *const cm = (VP9_COMMON *)data;
1123 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, "Truncated packet");
1141 VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
1142 if (cm->profile >= PROFILE_2)
1143 cm->bit_depth = vp9_rb_read_bit(rb) ? VPX_BITS_12 : VPX_BITS_10;
1144 cm->color_space = (COLOR_SPACE)vp9_rb_read_literal(rb, 3);
1145 if (cm->color_space != SRGB) {
1147 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
1148 cm->subsampling_x = vp9_rb_read_bit(rb);
1149 cm->subsampling_y = vp9_rb_read_bit(rb);
1150 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
1151 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1154 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1157 cm->subsampling_y = cm->subsampling_x = 1;
1160 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
1163 cm->subsampling_y = cm->subsampling_x = 0;
1165 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1168 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
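
read_bitdepth_colorspace_sampling() ties pixel format to the profile: profiles 2 and 3 read one extra bit to choose 10- or 12-bit depth (0 and 1 stay 8-bit); for non-sRGB color, profiles 1 and 3 carry explicit subsampling bits with 4:2:0 rejected, while profiles 0 and 2 imply 4:2:0; sRGB content is 4:4:4 and therefore needs profile 1 or 3. A tiny sketch that just prints that decision table (illustrative, not the parser itself):

/* Summary of VP9 profile constraints on bit depth and chroma subsampling. */
#include <stdio.h>

int main(void) {
  for (int profile = 0; profile <= 3; ++profile) {
    const char *depth = (profile >= 2) ? "10 or 12 bit (one extra header bit)"
                                       : "8 bit";
    const char *chroma = (profile == 1 || profile == 3)
                             ? "explicit subsampling bits (4:2:0 rejected)"
                             : "implicit 4:2:0 (non-sRGB only)";
    printf("profile %d: %s, %s\n", profile, depth, chroma);
  }
  return 0;
}
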
1176 VP9_COMMON *const cm = &pbi->common;
1180 cm->last_frame_type = cm->frame_type;
1183 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1186 cm->profile = vp9_read_profile(rb);
1188 if (cm->profile >= MAX_PROFILES)
1189 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1192 cm->show_existing_frame = vp9_rb_read_bit(rb);
1193 if (cm->show_existing_frame) {
1195 const int frame_to_show = cm->ref_frame_map[vp9_rb_read_literal(rb, 3)];
1197 if (frame_to_show < 0 || cm->frame_bufs[frame_to_show].ref_count < 1)
1198 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1202 ref_cnt_fb(cm->frame_bufs, &cm->new_fb_idx, frame_to_show);
1204 cm->lf.filter_level = 0;
1205 cm->show_frame = 1;
1209 cm->frame_type = (FRAME_TYPE) vp9_rb_read_bit(rb);
1210 cm->show_frame = vp9_rb_read_bit(rb);
1211 cm->error_resilient_mode = vp9_rb_read_bit(rb);
1213 if (cm->frame_type == KEY_FRAME) {
1215 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1218 read_bitdepth_colorspace_sampling(cm, rb);
1222 cm->frame_refs[i].idx = -1;
1223 cm->frame_refs[i].buf = NULL;
1226 setup_frame_size(cm, rb);
1229 cm->intra_only = cm->show_frame ? 0 : vp9_rb_read_bit(rb);
1231 cm->reset_frame_context = cm->error_resilient_mode ?
1234 if (cm->intra_only) {
1236 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
1238 if (cm->profile > PROFILE_0) {
1239 read_bitdepth_colorspace_sampling(cm, rb);
1245 cm->color_space = BT_601;
1246 cm->subsampling_y = cm->subsampling_x = 1;
1250 setup_frame_size(cm, rb);
1256 const int idx = cm->ref_frame_map[ref];
1257 RefBuffer *const ref_frame = &cm->frame_refs[i];
1259 ref_frame->buf = &cm->frame_bufs[idx].buf;
1260 cm->ref_frame_sign_bias[LAST_FRAME + i] = vp9_rb_read_bit(rb);
1263 setup_frame_size_with_refs(cm, rb);
1265 cm->allow_high_precision_mv = vp9_rb_read_bit(rb);
1266 cm->interp_filter = read_interp_filter(rb);
1269 RefBuffer *const ref_buf = &cm->frame_refs[i];
1274 cm->width, cm->height,
1275 cm->use_highbitdepth);
1280 cm->width, cm->height);
1289 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
1294 if (!cm->error_resilient_mode) {
1295 cm->refresh_frame_context = vp9_rb_read_bit(rb);
1296 cm->frame_parallel_decoding_mode = vp9_rb_read_bit(rb);
1298 cm->refresh_frame_context = 0;
1299 cm->frame_parallel_decoding_mode = 1;
1304 cm->frame_context_idx = vp9_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
1306 if (frame_is_intra_only(cm) || cm->error_resilient_mode)
1307 vp9_setup_past_independence(cm);
1309 setup_loopfilter(&cm->lf, rb);
1310 setup_quantization(cm, &pbi->mb, rb);
1311 setup_segmentation(&cm->seg, rb);
1313 setup_tile_info(cm, rb);
1317 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
1325 VP9_COMMON *const cm = &pbi->common;
1327 FRAME_CONTEXT *const fc = &cm->fc;
1333 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1336 cm->tx_mode = xd->lossless ? ONLY_4X4 : read_tx_mode(&r);
1337 if (cm->tx_mode == TX_MODE_SELECT)
1339 read_coef_probs(fc, cm->tx_mode, &r);
1344 if (!frame_is_intra_only(cm)) {
1350 if (cm->interp_filter == SWITCHABLE)
1356 cm->reference_mode = read_frame_reference_mode(cm, &r);
1357 if (cm->reference_mode != SINGLE_REFERENCE)
1358 setup_compound_reference_mode(cm);
1359 read_frame_reference_mode_probs(cm, &r);
1369 read_mv_probs(nmvc, cm->allow_high_precision_mv, &r);
1375 void vp9_init_dequantizer(VP9_COMMON *cm) {
1379 cm->y_dequant[q][0] = vp9_dc_quant(q, cm->y_dc_delta_q, cm->bit_depth);
1380 cm->y_dequant[q][1] = vp9_ac_quant(q, 0, cm->bit_depth);
1382 cm->uv_dequant[q][0] = vp9_dc_quant(q, cm->uv_dc_delta_q, cm->bit_depth);
1383 cm->uv_dequant[q][1] = vp9_ac_quant(q, cm->uv_ac_delta_q, cm->bit_depth);
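
vp9_init_dequantizer() fills the per-qindex dequant tables consumed by setup_plane_dequants() earlier in the listing: the luma DC value takes y_dc_delta_q, chroma takes uv_dc_delta_q/uv_ac_delta_q, and luma AC takes no delta. A toy version of that table-filling loop, with the spec's quantizer lookup tables replaced by a made-up ramp (toy_quant() is not a libvpx function):

/* Illustrative sketch of per-qindex dequant table construction. */
#include <stdio.h>

#define QINDEX_RANGE 256

static int toy_quant(int qindex, int delta) {
  int q = qindex + delta;
  if (q < 0) q = 0;
  if (q > QINDEX_RANGE - 1) q = QINDEX_RANGE - 1;
  return 4 + q * 2;  /* stand-in for the real DC/AC lookup tables */
}

int main(void) {
  static int y_dequant[QINDEX_RANGE][2], uv_dequant[QINDEX_RANGE][2];
  const int y_dc_delta_q = 0, uv_dc_delta_q = -2, uv_ac_delta_q = -2;

  for (int q = 0; q < QINDEX_RANGE; ++q) {
    y_dequant[q][0] = toy_quant(q, y_dc_delta_q);   /* luma DC */
    y_dequant[q][1] = toy_quant(q, 0);              /* luma AC: no delta */
    uv_dequant[q][0] = toy_quant(q, uv_dc_delta_q); /* chroma DC */
    uv_dequant[q][1] = toy_quant(q, uv_ac_delta_q); /* chroma AC */
  }
  printf("q=50: y DC/AC = %d/%d, uv DC/AC = %d/%d\n",
         y_dequant[50][0], y_dequant[50][1],
         uv_dequant[50][0], uv_dequant[50][1]);
  return 0;
}
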
1388 #define debug_check_frame_counts(cm) (void)0
1392 static void debug_check_frame_counts(const VP9_COMMON *const cm) {
1395 assert(cm->frame_parallel_decoding_mode || cm->error_resilient_mode);
1396 assert(!memcmp(cm->counts.y_mode, zero_counts.y_mode,
1397 sizeof(cm->counts.y_mode)));
1398 assert(!memcmp(cm->counts.uv_mode, zero_counts.uv_mode,
1399 sizeof(cm->counts.uv_mode)));
1400 assert(!memcmp(cm->counts.partition, zero_counts.partition,
1401 sizeof(cm->counts.partition)));
1402 assert(!memcmp(cm->counts.coef, zero_counts.coef,
1403 sizeof(cm->counts.coef)));
1404 assert(!memcmp(cm->counts.eob_branch, zero_counts.eob_branch,
1405 sizeof(cm->counts.eob_branch)));
1406 assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
1407 sizeof(cm->counts.switchable_interp)));
1408 assert(!memcmp(cm->counts.inter_mode, zero_counts.inter_mode,
1409 sizeof(cm->counts.inter_mode)));
1410 assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
1411 sizeof(cm->counts.intra_inter)));
1412 assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
1413 sizeof(cm->counts.comp_inter)));
1414 assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
1415 sizeof(cm->counts.single_ref)));
1416 assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
1417 sizeof(cm->counts.comp_ref)));
1418 assert(!memcmp(&cm->counts.tx, &zero_counts.tx, sizeof(cm->counts.tx)));
1419 assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
1420 assert(!memcmp(&cm->counts.mv, &zero_counts.mv, sizeof(cm->counts.mv)));
1448 VP9_COMMON *const cm = &pbi->common;
1455 const int tile_rows = 1 << cm->log2_tile_rows;
1456 const int tile_cols = 1 << cm->log2_tile_cols;
1457 YV12_BUFFER_CONFIG *const new_fb = get_frame_new_buffer(cm);
1462 *p_data_end = data + (cm->profile <= PROFILE_2 ? 1 : 2);
1468 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
1471 init_macroblockd(cm, &pbi->mb);
1473 if (!cm->error_resilient_mode)
1474 set_prev_mi(cm);
1476 cm->prev_mi = NULL;
1478 setup_plane_dequants(cm, xd, cm->base_qindex);
1479 vp9_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
1481 cm->fc = cm->frame_contexts[cm->frame_context_idx];
1482 vp9_zero(cm->counts);
1491 cm->frame_parallel_decoding_mode) {
1496 vp9_loop_filter_frame_mt(new_fb, pbi, cm, cm->lf.filter_level, 0);
1505 if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode) {
1506 vp9_adapt_coef_probs(cm);
1508 if (!frame_is_intra_only(cm)) {
1509 vp9_adapt_mode_probs(cm);
1510 vp9_adapt_mv_probs(cm, cm->allow_high_precision_mv);
1513 debug_check_frame_counts(cm);
1516 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
1520 if (cm->refresh_frame_context)
1521 cm->frame_contexts[cm->frame_context_idx] = cm->fc;