/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#include <stdlib.h>
#include <string.h>
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "vpx_version.h"
#include "vp9/decoder/vp9_onyxd.h"
#include "vp9/decoder/vp9_onyxd_int.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/vp9_iface_common.h"

#define VP9_CAP_POSTPROC (CONFIG_VP9_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
typedef vpx_codec_stream_info_t vp9_stream_info_t;

/* Structures for handling memory allocations */
typedef enum {
  VP9_SEG_ALG_PRIV = 256,
  VP9_SEG_MAX
} mem_seg_id_t;
#define NELEMENTS(x) ((int)(sizeof(x) / sizeof(x[0])))

static unsigned long priv_sz(const vpx_codec_dec_cfg_t *si,
                             vpx_codec_flags_t flags);

static const mem_req_t vp9_mem_req_segs[] = {
  {VP9_SEG_ALG_PRIV, 0, 8, VPX_CODEC_MEM_ZERO, priv_sz},
  {VP9_SEG_MAX, 0, 0, 0, NULL}
};

struct vpx_codec_alg_priv {
  vpx_codec_priv_t    base;
  vpx_codec_mmap_t    mmaps[NELEMENTS(vp9_mem_req_segs) - 1];
  vpx_codec_dec_cfg_t cfg;
  vp9_stream_info_t   si;
  int                 defer_alloc;
  int                 decoder_init;
  VP9D_PTR            pbi;
  int                 postproc_cfg_set;
  vp8_postproc_cfg_t  postproc_cfg;
#if CONFIG_POSTPROC_VISUALIZER
  unsigned int        dbg_postproc_flag;
  int                 dbg_color_ref_frame_flag;
  int                 dbg_color_mb_modes_flag;
  int                 dbg_color_b_modes_flag;
  int                 dbg_display_mv_flag;
#endif
  vpx_image_t         img;
  int                 img_setup;
  int                 img_avail;
  int                 invert_tile_order;
};

static unsigned long priv_sz(const vpx_codec_dec_cfg_t *si,
                             vpx_codec_flags_t flags) {
  /* Although this declaration is constant, we can't use it in the requested
   * segments list because we want to define the requested segments list
   * before defining the private type (so that the number of memory maps is
   * known)
   */
  (void)si;
  return sizeof(vpx_codec_alg_priv_t);
}

static void vp9_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap) {
  int i;

  ctx->priv = mmap->base;
  ctx->priv->sz = sizeof(*ctx->priv);
  ctx->priv->iface = ctx->iface;
  ctx->priv->alg_priv = mmap->base;

  for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
    ctx->priv->alg_priv->mmaps[i].id = vp9_mem_req_segs[i].id;

  ctx->priv->alg_priv->mmaps[0] = *mmap;
  ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si);
  ctx->priv->init_flags = ctx->init_flags;

  if (ctx->config.dec) {
    /* Update the reference to the config structure to an internal copy. */
    ctx->priv->alg_priv->cfg = *ctx->config.dec;
    ctx->config.dec = &ctx->priv->alg_priv->cfg;
  }
}

static void vp9_finalize_mmaps(vpx_codec_alg_priv_t *ctx) {
  /* nothing to clean up */
}

static vpx_codec_err_t vp9_init(vpx_codec_ctx_t *ctx,
                                vpx_codec_priv_enc_mr_cfg_t *data) {
  vpx_codec_err_t res = VPX_CODEC_OK;

  /* This function only allocates space for the vpx_codec_alg_priv_t
   * structure. More memory may be required at the time the stream
   * information becomes known.
   */
  if (!ctx->priv) {
    vpx_codec_mmap_t mmap;

    mmap.id = vp9_mem_req_segs[0].id;
    mmap.sz = sizeof(vpx_codec_alg_priv_t);
    mmap.align = vp9_mem_req_segs[0].align;
    mmap.flags = vp9_mem_req_segs[0].flags;

    res = vpx_mmap_alloc(&mmap);

    if (!res) {
      vp9_init_ctx(ctx, &mmap);

      ctx->priv->alg_priv->defer_alloc = 1;
      /* Post-processing level initialized to do nothing. */
    }
  }

  return res;
}

static vpx_codec_err_t vp9_destroy(vpx_codec_alg_priv_t *ctx) {
  int i;

  vp9_remove_decompressor(ctx->pbi);

  for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--) {
    if (ctx->mmaps[i].dtor)
      ctx->mmaps[i].dtor(&ctx->mmaps[i]);
  }

  return VPX_CODEC_OK;
}

static vpx_codec_err_t vp9_peek_si(const uint8_t *data,
                                   unsigned int data_sz,
                                   vpx_codec_stream_info_t *si) {
  if (data_sz <= 8) return VPX_CODEC_UNSUP_BITSTREAM;
  if (data + data_sz <= data) return VPX_CODEC_INVALID_PARAM;

  si->is_kf = 0;
  si->w = si->h = 0;

  {
    struct vp9_read_bit_buffer rb = { data, data + data_sz, 0, NULL, NULL };
    const int frame_marker = vp9_rb_read_literal(&rb, 2);
    const int version = vp9_rb_read_bit(&rb) | (vp9_rb_read_bit(&rb) << 1);
    if (frame_marker != 0x2) return VPX_CODEC_UNSUP_BITSTREAM;
#if CONFIG_NON420
    if (version > 1) return VPX_CODEC_UNSUP_BITSTREAM;
#else
    if (version != 0) return VPX_CODEC_UNSUP_BITSTREAM;
#endif

    if (vp9_rb_read_bit(&rb)) {  // show an existing frame
      return VPX_CODEC_OK;
    }

    si->is_kf = !vp9_rb_read_bit(&rb);
    if (si->is_kf) {
      const int sRGB = 7;
      int colorspace;

      rb.bit_offset += 1;  // show frame
      rb.bit_offset += 1;  // error resilient

      if (vp9_rb_read_literal(&rb, 8) != SYNC_CODE_0 ||
          vp9_rb_read_literal(&rb, 8) != SYNC_CODE_1 ||
          vp9_rb_read_literal(&rb, 8) != SYNC_CODE_2) {
        return VPX_CODEC_UNSUP_BITSTREAM;
      }

      colorspace = vp9_rb_read_literal(&rb, 3);
      if (colorspace != sRGB) {
        rb.bit_offset += 1;  // [16,235] (including xvycc) vs [0,255] range
        if (version == 1) {
          rb.bit_offset += 2;  // subsampling x/y
          rb.bit_offset += 1;  // has extra plane
        }
      } else {
        if (version == 1) {
          rb.bit_offset += 1;  // has extra plane
        } else {
          // RGB is only available in version 1
          return VPX_CODEC_UNSUP_BITSTREAM;
        }
      }

      // TODO(jzern): these are available on non-keyframes in intra only mode.
      si->w = vp9_rb_read_literal(&rb, 16) + 1;
      si->h = vp9_rb_read_literal(&rb, 16) + 1;
    }
  }

  return VPX_CODEC_OK;
}

static vpx_codec_err_t vp9_get_si(vpx_codec_alg_priv_t *ctx,
                                  vpx_codec_stream_info_t *si) {
  unsigned int sz;

  if (si->sz >= sizeof(vp9_stream_info_t))
    sz = sizeof(vp9_stream_info_t);
  else
    sz = sizeof(vpx_codec_stream_info_t);

  memcpy(si, &ctx->si, sz);
  si->sz = sz;

  return VPX_CODEC_OK;
}


static vpx_codec_err_t
update_error_state(vpx_codec_alg_priv_t *ctx,
                   const struct vpx_internal_error_info *error) {
  vpx_codec_err_t res;

  if ((res = error->error_code))
    ctx->base.err_detail = error->has_detail ?
                           error->detail : NULL;

  return res;
}

static vpx_codec_err_t decode_one(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t **data,
                                  unsigned int data_sz,
                                  void *user_priv,
                                  long deadline) {
  vpx_codec_err_t res = VPX_CODEC_OK;

  ctx->img_avail = 0;

  /* Determine the stream parameters. Note that we rely on peek_si to
   * validate that we have a buffer that does not wrap around the top
   * of the heap.
   */
  if (!ctx->si.h)
    res = ctx->base.iface->dec.peek_si(*data, data_sz, &ctx->si);


  /* Perform deferred allocations, if required */
  if (!res && ctx->defer_alloc) {
    int i;

    for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++) {
      vpx_codec_dec_cfg_t cfg;

      cfg.w = ctx->si.w;
      cfg.h = ctx->si.h;
      ctx->mmaps[i].id = vp9_mem_req_segs[i].id;
      ctx->mmaps[i].sz = vp9_mem_req_segs[i].sz;
      ctx->mmaps[i].align = vp9_mem_req_segs[i].align;
      ctx->mmaps[i].flags = vp9_mem_req_segs[i].flags;

      if (!ctx->mmaps[i].sz)
        ctx->mmaps[i].sz = vp9_mem_req_segs[i].calc_sz(&cfg,
                                                       ctx->base.init_flags);

      res = vpx_mmap_alloc(&ctx->mmaps[i]);
    }

    if (!res)
      vp9_finalize_mmaps(ctx);

    ctx->defer_alloc = 0;
  }

  /* Initialize the decoder instance on the first frame */
  if (!res && !ctx->decoder_init) {
    res = vpx_validate_mmaps(&ctx->si, ctx->mmaps,
                             vp9_mem_req_segs, NELEMENTS(vp9_mem_req_segs),
                             ctx->base.init_flags);

    if (!res) {
      VP9D_CONFIG oxcf;
      VP9D_PTR optr;

      vp9_initialize_dec();

      oxcf.width = ctx->si.w;
      oxcf.height = ctx->si.h;
      oxcf.version = 9;
      oxcf.postprocess = 0;
      oxcf.max_threads = ctx->cfg.threads;
      oxcf.inv_tile_order = ctx->invert_tile_order;
      optr = vp9_create_decompressor(&oxcf);

      /* If postprocessing was enabled by the application and a
       * configuration has not been provided, default it.
       */
      if (!ctx->postproc_cfg_set &&
          (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)) {
        ctx->postproc_cfg.post_proc_flag = VP8_DEBLOCK | VP8_DEMACROBLOCK;
        ctx->postproc_cfg.deblocking_level = 4;
        ctx->postproc_cfg.noise_level = 0;
      }

      if (!optr)
        res = VPX_CODEC_ERROR;
      else
        ctx->pbi = optr;
    }

    ctx->decoder_init = 1;
  }

  if (!res && ctx->pbi) {
    YV12_BUFFER_CONFIG sd;
    int64_t time_stamp = 0, time_end_stamp = 0;
    vp9_ppflags_t flags = {0};

    if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC) {
      flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag
#if CONFIG_POSTPROC_VISUALIZER
                             | ((ctx->dbg_color_ref_frame_flag != 0) ?
                                VP9D_DEBUG_CLR_FRM_REF_BLKS : 0)
                             | ((ctx->dbg_color_mb_modes_flag != 0) ?
                                VP9D_DEBUG_CLR_BLK_MODES : 0)
                             | ((ctx->dbg_color_b_modes_flag != 0) ?
                                VP9D_DEBUG_CLR_BLK_MODES : 0)
                             | ((ctx->dbg_display_mv_flag != 0) ?
                                VP9D_DEBUG_DRAW_MV : 0)
#endif
                             ;
      flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
      flags.noise_level = ctx->postproc_cfg.noise_level;
#if CONFIG_POSTPROC_VISUALIZER
      flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag;
      flags.display_mb_modes_flag = ctx->dbg_color_mb_modes_flag;
      flags.display_b_modes_flag = ctx->dbg_color_b_modes_flag;
      flags.display_mv_flag = ctx->dbg_display_mv_flag;
#endif
    }

    if (vp9_receive_compressed_data(ctx->pbi, data_sz, data, deadline)) {
      VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
      res = update_error_state(ctx, &pbi->common.error);
    }

    if (!res && 0 == vp9_get_raw_frame(ctx->pbi, &sd, &time_stamp,
                                       &time_end_stamp, &flags)) {
      yuvconfig2image(&ctx->img, &sd, user_priv);
      ctx->img_avail = 1;
    }
  }

  return res;
}

static void parse_superframe_index(const uint8_t *data,
                                   size_t data_sz,
                                   uint32_t sizes[8],
                                   int *count) {
  uint8_t marker;

  assert(data_sz);
  marker = data[data_sz - 1];
  *count = 0;

  if ((marker & 0xe0) == 0xc0) {
    const uint32_t frames = (marker & 0x7) + 1;
    const uint32_t mag = ((marker >> 3) & 0x3) + 1;
    const size_t index_sz = 2 + mag * frames;

    if (data_sz >= index_sz && data[data_sz - index_sz] == marker) {
      // found a valid superframe index
      uint32_t i, j;
      const uint8_t *x = data + data_sz - index_sz + 1;

      for (i = 0; i < frames; i++) {
        uint32_t this_sz = 0;

        for (j = 0; j < mag; j++)
          this_sz |= (*x++) << (j * 8);
        sizes[i] = this_sz;
      }

      *count = frames;
    }
  }
}

static vpx_codec_err_t vp9_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t *data,
                                  unsigned int data_sz,
                                  void *user_priv,
                                  long deadline) {
  const uint8_t *data_start = data;
  const uint8_t *data_end = data + data_sz;
  vpx_codec_err_t res = VPX_CODEC_OK;
  uint32_t sizes[8];
  int frames_this_pts, frame_count = 0;

  if (data == NULL || data_sz == 0) return VPX_CODEC_INVALID_PARAM;

  parse_superframe_index(data, data_sz, sizes, &frames_this_pts);

  do {
    // Skip over the superframe index, if present
    if (data_sz && (*data_start & 0xe0) == 0xc0) {
      const uint8_t marker = *data_start;
      const uint32_t frames = (marker & 0x7) + 1;
      const uint32_t mag = ((marker >> 3) & 0x3) + 1;
      const uint32_t index_sz = 2 + mag * frames;

      if (data_sz >= index_sz && data_start[index_sz - 1] == marker) {
        data_start += index_sz;
        data_sz -= index_sz;
        if (data_start < data_end)
          continue;
        else
          break;
      }
    }

    // Use the correct size for this frame, if an index is present.
    if (frames_this_pts) {
      uint32_t this_sz = sizes[frame_count];

      if (data_sz < this_sz) {
        ctx->base.err_detail = "Invalid frame size in index";
        return VPX_CODEC_CORRUPT_FRAME;
      }

      data_sz = this_sz;
      frame_count++;
    }

    res = decode_one(ctx, &data_start, data_sz, user_priv, deadline);
    assert(data_start >= data);
    assert(data_start <= data_end);

    /* Early exit if there was a decode error */
    if (res)
      break;

    /* Account for suboptimal termination by the encoder.
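     * Any trailing zero bytes left between frames are skipped here so that
     * the next pass through the loop begins at the next frame's data.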
     */
    while (data_start < data_end && *data_start == 0)
      data_start++;

    data_sz = data_end - data_start;
  } while (data_start < data_end);
  return res;
}

static vpx_image_t *vp9_get_frame(vpx_codec_alg_priv_t *ctx,
                                  vpx_codec_iter_t *iter) {
  vpx_image_t *img = NULL;

  if (ctx->img_avail) {
    /* iter acts as a flip flop, so an image is only returned on the first
     * call to get_frame.
     */
    if (!(*iter)) {
      img = &ctx->img;
      *iter = img;
    }
  }
  ctx->img_avail = 0;

  return img;
}

static vpx_codec_err_t vp9_xma_get_mmap(const vpx_codec_ctx_t *ctx,
                                        vpx_codec_mmap_t *mmap,
                                        vpx_codec_iter_t *iter) {
  vpx_codec_err_t res;
  const mem_req_t *seg_iter = *iter;

  /* Get address of next segment request */
  do {
    if (!seg_iter)
      seg_iter = vp9_mem_req_segs;
    else if (seg_iter->id != VP9_SEG_MAX)
      seg_iter++;

    *iter = (vpx_codec_iter_t)seg_iter;

    if (seg_iter->id != VP9_SEG_MAX) {
      mmap->id = seg_iter->id;
      mmap->sz = seg_iter->sz;
      mmap->align = seg_iter->align;
      mmap->flags = seg_iter->flags;

      if (!seg_iter->sz)
        mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);

      res = VPX_CODEC_OK;
    } else
      res = VPX_CODEC_LIST_END;
  } while (!mmap->sz && res != VPX_CODEC_LIST_END);

  return res;
}

static vpx_codec_err_t vp9_xma_set_mmap(vpx_codec_ctx_t *ctx,
                                        const vpx_codec_mmap_t *mmap) {
  vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
  int i, done;

  if (!ctx->priv) {
    if (mmap->id == VP9_SEG_ALG_PRIV) {
      if (!ctx->priv) {
        vp9_init_ctx(ctx, mmap);
        res = VPX_CODEC_OK;
      }
    }
  }

  done = 1;

  if (!res && ctx->priv->alg_priv) {
    for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++) {
      if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
        if (!ctx->priv->alg_priv->mmaps[i].base) {
          ctx->priv->alg_priv->mmaps[i] = *mmap;
          res = VPX_CODEC_OK;
        }

      done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
    }
  }

  if (done && !res) {
    vp9_finalize_mmaps(ctx->priv->alg_priv);
    res = ctx->iface->init(ctx, NULL);
  }

  return res;
}

static vpx_codec_err_t set_reference(vpx_codec_alg_priv_t *ctx,
                                     int ctr_id,
                                     va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;

    image2yuvconfig(&frame->img, &sd);

    return vp9_set_reference_dec(ctx->pbi,
                                 (VP9_REFFRAME)frame->frame_type, &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t copy_reference(vpx_codec_alg_priv_t *ctx,
                                      int ctr_id,
                                      va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;

    image2yuvconfig(&frame->img, &sd);

    return vp9_copy_reference_dec(ctx->pbi,
                                  (VP9_REFFRAME)frame->frame_type, &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t get_reference(vpx_codec_alg_priv_t *ctx,
                                     int ctr_id,
                                     va_list args) {
  vp9_ref_frame_t *data = va_arg(args, vp9_ref_frame_t *);

  if (data) {
    YV12_BUFFER_CONFIG *fb;

    vp9_get_reference_dec(ctx->pbi, data->idx, &fb);
    yuvconfig2image(&data->img, fb, NULL);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}
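
/* Handler for the VP8_SET_POSTPROC control: records the application-supplied
 * post-processing configuration. Only functional when the library is built
 * with CONFIG_VP9_POSTPROC; otherwise the control reports VPX_CODEC_INCAPABLE.
 */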
static vpx_codec_err_t set_postproc(vpx_codec_alg_priv_t *ctx,
                                    int ctr_id,
                                    va_list args) {
#if CONFIG_VP9_POSTPROC
  vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);

  if (data) {
    ctx->postproc_cfg_set = 1;
    ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
#else
  return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t set_dbg_options(vpx_codec_alg_priv_t *ctx,
                                       int ctrl_id,
                                       va_list args) {
#if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC
  int data = va_arg(args, int);

#define MAP(id, var) case id: var = data; break;

  switch (ctrl_id) {
    MAP(VP8_SET_DBG_COLOR_REF_FRAME, ctx->dbg_color_ref_frame_flag);
    MAP(VP8_SET_DBG_COLOR_MB_MODES, ctx->dbg_color_mb_modes_flag);
    MAP(VP8_SET_DBG_COLOR_B_MODES, ctx->dbg_color_b_modes_flag);
    MAP(VP8_SET_DBG_DISPLAY_MV, ctx->dbg_display_mv_flag);
  }

  return VPX_CODEC_OK;
#else
  return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
                                            int ctrl_id,
                                            va_list args) {
  int *update_info = va_arg(args, int *);
  VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;

  if (update_info) {
    *update_info = pbi->refresh_frame_flags;

    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}


static vpx_codec_err_t get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
                                           int ctrl_id,
                                           va_list args) {
  int *corrupted = va_arg(args, int *);

  if (corrupted) {
    VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
    *corrupted = pbi->common.frame_to_show->corrupted;

    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t set_invert_tile_order(vpx_codec_alg_priv_t *ctx,
                                             int ctr_id,
                                             va_list args) {
  ctx->invert_tile_order = va_arg(args, int);
  return VPX_CODEC_OK;
}

static vpx_codec_ctrl_fn_map_t ctf_maps[] = {
  {VP8_SET_REFERENCE, set_reference},
  {VP8_COPY_REFERENCE, copy_reference},
  {VP8_SET_POSTPROC, set_postproc},
  {VP8_SET_DBG_COLOR_REF_FRAME, set_dbg_options},
  {VP8_SET_DBG_COLOR_MB_MODES, set_dbg_options},
  {VP8_SET_DBG_COLOR_B_MODES, set_dbg_options},
  {VP8_SET_DBG_DISPLAY_MV, set_dbg_options},
  {VP8D_GET_LAST_REF_UPDATES, get_last_ref_updates},
  {VP8D_GET_FRAME_CORRUPTED, get_frame_corrupted},
  {VP9_GET_REFERENCE, get_reference},
  {VP9_INVERT_TILE_DECODE_ORDER, set_invert_tile_order},
  { -1, NULL},
};


#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp9_dx) = {
  "WebM Project VP9 Decoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
  VPX_CODEC_CAP_DECODER | VP9_CAP_POSTPROC,
  /* vpx_codec_caps_t caps; */
  vp9_init,          /* vpx_codec_init_fn_t      init; */
  vp9_destroy,       /* vpx_codec_destroy_fn_t   destroy; */
  ctf_maps,          /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
  vp9_xma_get_mmap,  /* vpx_codec_get_mmap_fn_t  get_mmap; */
  vp9_xma_set_mmap,  /* vpx_codec_set_mmap_fn_t  set_mmap; */
  {
    vp9_peek_si,     /* vpx_codec_peek_si_fn_t   peek_si; */
    vp9_get_si,      /* vpx_codec_get_si_fn_t    get_si; */
    vp9_decode,      /* vpx_codec_decode_fn_t    decode; */
    vp9_get_frame,   /* vpx_codec_frame_get_fn_t frame_get; */
  },
  {
    /* encoder functions */
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED
  }
};