/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#include <stdlib.h>
#include <string.h>
#include "vpx_rtcd.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "vpx_version.h"
#include "common/onyxd.h"
#include "decoder/onyxd_int.h"
#include "common/alloccommon.h"
#include "vpx_mem/vpx_mem.h"
#if CONFIG_ERROR_CONCEALMENT
#include "decoder/error_concealment.h"
#endif
#include "decoder/decoderthreading.h"

#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
#define VP8_CAP_ERROR_CONCEALMENT (CONFIG_ERROR_CONCEALMENT ? \
                                    VPX_CODEC_CAP_ERROR_CONCEALMENT : 0)

typedef vpx_codec_stream_info_t  vp8_stream_info_t;

/* Structures for handling memory allocations */
typedef enum
{
    VP8_SEG_ALG_PRIV     = 256,
    VP8_SEG_MAX
} mem_seg_id_t;
#define NELEMENTS(x) ((int)(sizeof(x)/sizeof(x[0])))

static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t);

typedef struct
{
    unsigned int   id;
    unsigned long  sz;
    unsigned int   align;
    unsigned int   flags;
    unsigned long(*calc_sz)(const vpx_codec_dec_cfg_t *, vpx_codec_flags_t);
} mem_req_t;

static const mem_req_t vp8_mem_req_segs[] =
{
    {VP8_SEG_ALG_PRIV,    0, 8, VPX_CODEC_MEM_ZERO, vp8_priv_sz},
    {VP8_SEG_MAX, 0, 0, 0, NULL}
};
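
/* The external memory allocation (XMA) entry points below are driven by this
 * table: vp8_xma_get_mmap() walks it to describe each segment the decoder
 * needs, and vp8_xma_set_mmap() records the buffers supplied by the
 * application before the decoder is initialized.
 */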
58
59struct vpx_codec_alg_priv
60{
61    vpx_codec_priv_t        base;
62    vpx_codec_mmap_t        mmaps[NELEMENTS(vp8_mem_req_segs)-1];
63    vpx_codec_dec_cfg_t     cfg;
64    vp8_stream_info_t       si;
65    int                     defer_alloc;
66    int                     decoder_init;
67    struct VP8D_COMP       *pbi;
68    int                     postproc_cfg_set;
69    vp8_postproc_cfg_t      postproc_cfg;
70#if CONFIG_POSTPROC_VISUALIZER
71    unsigned int            dbg_postproc_flag;
72    int                     dbg_color_ref_frame_flag;
73    int                     dbg_color_mb_modes_flag;
74    int                     dbg_color_b_modes_flag;
75    int                     dbg_display_mv_flag;
76#endif
77    vpx_image_t             img;
78    int                     img_setup;
79    void                    *user_priv;
80};
81
82static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t flags)
83{
84    /* Although this declaration is constant, we can't use it in the requested
85     * segments list because we want to define the requested segments list
86     * before defining the private type (so that the number of memory maps is
87     * known)
88     */
89    (void)si;
90    return sizeof(vpx_codec_alg_priv_t);
91}
92
93
94static void vp8_mmap_dtor(vpx_codec_mmap_t *mmap)
95{
96    free(mmap->priv);
97}
98
99static vpx_codec_err_t vp8_mmap_alloc(vpx_codec_mmap_t *mmap)
100{
101    vpx_codec_err_t  res;
102    unsigned int   align;
103
104    align = mmap->align ? mmap->align - 1 : 0;
105
106    if (mmap->flags & VPX_CODEC_MEM_ZERO)
107        mmap->priv = calloc(1, mmap->sz + align);
108    else
109        mmap->priv = malloc(mmap->sz + align);
110
111    res = (mmap->priv) ? VPX_CODEC_OK : VPX_CODEC_MEM_ERROR;
112    mmap->base = (void *)((((uintptr_t)mmap->priv) + align) & ~(uintptr_t)align);
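    /* Worked example: with mmap->align == 8 the local `align` mask is 7, so
     * a raw pointer of 0x1003 rounds up to (0x1003 + 7) & ~7 == 0x1008. The
     * extra `align` padding bytes added to the request above guarantee the
     * rounded base still lies inside the allocation.
     */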
    mmap->dtor = vp8_mmap_dtor;
    return res;
}

static vpx_codec_err_t vp8_validate_mmaps(const vp8_stream_info_t *si,
        const vpx_codec_mmap_t        *mmaps,
        vpx_codec_flags_t              init_flags)
{
    int i;
    vpx_codec_err_t res = VPX_CODEC_OK;

    for (i = 0; i < NELEMENTS(vp8_mem_req_segs) - 1; i++)
    {
        /* Ensure the segment has been allocated */
        if (!mmaps[i].base)
        {
            res = VPX_CODEC_MEM_ERROR;
            break;
        }

        /* Verify variable size segment is big enough for the current si. */
        if (vp8_mem_req_segs[i].calc_sz)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = si->w;
            cfg.h = si->h;

            if (mmaps[i].sz < vp8_mem_req_segs[i].calc_sz(&cfg, init_flags))
            {
                res = VPX_CODEC_MEM_ERROR;
                break;
            }
        }
    }

    return res;
}

static void vp8_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap)
{
    int i;

    ctx->priv = mmap->base;
    ctx->priv->sz = sizeof(*ctx->priv);
    ctx->priv->iface = ctx->iface;
    ctx->priv->alg_priv = mmap->base;

    for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        ctx->priv->alg_priv->mmaps[i].id = vp8_mem_req_segs[i].id;

    ctx->priv->alg_priv->mmaps[0] = *mmap;
    ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si);
    ctx->priv->init_flags = ctx->init_flags;

    if (ctx->config.dec)
    {
        /* Update the reference to the config structure to an internal copy. */
        ctx->priv->alg_priv->cfg = *ctx->config.dec;
        ctx->config.dec = &ctx->priv->alg_priv->cfg;
    }
}

static void *mmap_lkup(vpx_codec_alg_priv_t *ctx, unsigned int id)
{
    int i;

    for (i = 0; i < NELEMENTS(ctx->mmaps); i++)
        if (ctx->mmaps[i].id == id)
            return ctx->mmaps[i].base;

    return NULL;
}

static void vp8_finalize_mmaps(vpx_codec_alg_priv_t *ctx)
{
    /* nothing to clean up */
}

static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx,
                                vpx_codec_priv_enc_mr_cfg_t *data)
{
    vpx_codec_err_t        res = VPX_CODEC_OK;
    (void) data;

    vpx_rtcd();

    /* This function only allocates space for the vpx_codec_alg_priv_t
     * structure. More memory may be required at the time the stream
     * information becomes known.
     */
    if (!ctx->priv)
    {
        vpx_codec_mmap_t mmap;

        mmap.id = vp8_mem_req_segs[0].id;
        mmap.sz = sizeof(vpx_codec_alg_priv_t);
        mmap.align = vp8_mem_req_segs[0].align;
        mmap.flags = vp8_mem_req_segs[0].flags;

        res = vp8_mmap_alloc(&mmap);

        if (!res)
        {
            vp8_init_ctx(ctx, &mmap);

            ctx->priv->alg_priv->defer_alloc = 1;
            /* post-processing level initialized to do nothing */
        }
    }

    return res;
}
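
/* Application-side initialization (sketch; the names outside this file are
 * the public libvpx decoder API, and error handling is up to the caller):
 *
 *     vpx_codec_ctx_t codec;
 *     vpx_codec_dec_cfg_t cfg = {0};
 *
 *     if (vpx_codec_dec_init(&codec, vpx_codec_vp8_dx(), &cfg, 0))
 *         ;   // handle initialization failure
 */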

static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx)
{
    int i;

    vp8dx_remove_decompressor(ctx->pbi);

    for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--)
    {
        if (ctx->mmaps[i].dtor)
            ctx->mmaps[i].dtor(&ctx->mmaps[i]);
    }

    return VPX_CODEC_OK;
}

static vpx_codec_err_t vp8_peek_si(const uint8_t         *data,
                                   unsigned int           data_sz,
                                   vpx_codec_stream_info_t *si)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    if (data + data_sz <= data)
        res = VPX_CODEC_INVALID_PARAM;
    else
    {
        /* Parse the uncompressed part of the key frame header.
         * 3 bytes: frame tag, including version, frame type and the first
         *          partition size
         * 3 bytes: sync code (0x9d, 0x01, 0x2a)
         * 4 bytes: image width and height in the lowest 14 bits of each
         *          2-byte value
         */
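        /* Example (illustrative values only): for the ten header bytes
         * 50 00 00 9d 01 2a 80 02 e0 01, data[0] & 0x01 == 0 marks a key
         * frame, bytes 3..5 match the sync code, and the two size words
         * 0x0280 and 0x01e0 decode to a 640x480 frame.
         */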
        si->is_kf = 0;

        if (data_sz >= 10 && !(data[0] & 0x01))  /* I-Frame */
        {
            const uint8_t *c = data + 3;
            si->is_kf = 1;

            /* vet via sync code */
            if (c[0] != 0x9d || c[1] != 0x01 || c[2] != 0x2a)
                res = VPX_CODEC_UNSUP_BITSTREAM;

            si->w = (c[3] | (c[4] << 8)) & 0x3fff;
            si->h = (c[5] | (c[6] << 8)) & 0x3fff;

            /*printf("w=%d, h=%d\n", si->w, si->h);*/
            if (!(si->h | si->w))
                res = VPX_CODEC_UNSUP_BITSTREAM;
        }
        else
            res = VPX_CODEC_UNSUP_BITSTREAM;
    }

    return res;

}

static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t    *ctx,
                                  vpx_codec_stream_info_t *si)
{

    unsigned int sz;

    if (si->sz >= sizeof(vp8_stream_info_t))
        sz = sizeof(vp8_stream_info_t);
    else
        sz = sizeof(vpx_codec_stream_info_t);

    memcpy(si, &ctx->si, sz);
    si->sz = sz;

    return VPX_CODEC_OK;
}


static vpx_codec_err_t
update_error_state(vpx_codec_alg_priv_t                 *ctx,
                   const struct vpx_internal_error_info *error)
{
    vpx_codec_err_t res;

    if ((res = error->error_code))
        ctx->base.err_detail = error->has_detail
                               ? error->detail
                               : NULL;

    return res;
}

static void yuvconfig2image(vpx_image_t               *img,
                            const YV12_BUFFER_CONFIG  *yv12,
                            void                      *user_priv)
{
    /** vpx_img_wrap() doesn't allow specifying independent strides for
      * the Y, U, and V planes, nor other alignment adjustments that
      * might be representable by a YV12_BUFFER_CONFIG, so we just
      * initialize all the fields.*/
    img->fmt = yv12->clrtype == REG_YUV ?
        VPX_IMG_FMT_I420 : VPX_IMG_FMT_VPXI420;
    img->w = yv12->y_stride;
    img->h = (yv12->y_height + 2 * VP8BORDERINPIXELS + 15) & ~15;
    img->d_w = yv12->y_width;
    img->d_h = yv12->y_height;
    img->x_chroma_shift = 1;
    img->y_chroma_shift = 1;
    img->planes[VPX_PLANE_Y] = yv12->y_buffer;
    img->planes[VPX_PLANE_U] = yv12->u_buffer;
    img->planes[VPX_PLANE_V] = yv12->v_buffer;
    img->planes[VPX_PLANE_ALPHA] = NULL;
    img->stride[VPX_PLANE_Y] = yv12->y_stride;
    img->stride[VPX_PLANE_U] = yv12->uv_stride;
    img->stride[VPX_PLANE_V] = yv12->uv_stride;
    img->stride[VPX_PLANE_ALPHA] = yv12->y_stride;
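    /* 12 bits per pixel for 4:2:0 data: 8 bits of luma plus two
     * quarter-resolution 8-bit chroma planes (8 + 2 + 2).
     */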
    img->bps = 12;
    img->user_priv = user_priv;
    img->img_data = yv12->buffer_alloc;
    img->img_data_owner = 0;
    img->self_allocd = 0;
}

static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t  *ctx,
                                  const uint8_t         *data,
                                  unsigned int            data_sz,
                                  void                    *user_priv,
                                  long                    deadline)
{
    vpx_codec_err_t res = VPX_CODEC_OK;
    unsigned int resolution_change = 0;
    unsigned int w, h;

    /* Determine the stream parameters. Note that we rely on peek_si to
     * validate that we have a buffer that does not wrap around the top
     * of the heap.
     */
    w = ctx->si.w;
    h = ctx->si.h;

    res = ctx->base.iface->dec.peek_si(data, data_sz, &ctx->si);

    if ((res == VPX_CODEC_UNSUP_BITSTREAM) && !ctx->si.is_kf)
    {
        /* The peek function returns an error for non-keyframes; in this
         * case that is not an error.
         */
        res = VPX_CODEC_OK;
    }

    if (!ctx->decoder_init && !ctx->si.is_kf)
        res = VPX_CODEC_UNSUP_BITSTREAM;

    if ((ctx->si.h != h) || (ctx->si.w != w))
        resolution_change = 1;

    /* Perform deferred allocations, if required */
    if (!res && ctx->defer_alloc)
    {
        int i;

        for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = ctx->si.w;
            cfg.h = ctx->si.h;
            ctx->mmaps[i].id = vp8_mem_req_segs[i].id;
            ctx->mmaps[i].sz = vp8_mem_req_segs[i].sz;
            ctx->mmaps[i].align = vp8_mem_req_segs[i].align;
            ctx->mmaps[i].flags = vp8_mem_req_segs[i].flags;

            if (!ctx->mmaps[i].sz)
                ctx->mmaps[i].sz = vp8_mem_req_segs[i].calc_sz(&cfg,
                                   ctx->base.init_flags);

            res = vp8_mmap_alloc(&ctx->mmaps[i]);
        }

        if (!res)
            vp8_finalize_mmaps(ctx);

        ctx->defer_alloc = 0;
    }

    /* Initialize the decoder instance on the first frame */
    if (!res && !ctx->decoder_init)
    {
        res = vp8_validate_mmaps(&ctx->si, ctx->mmaps, ctx->base.init_flags);

        if (!res)
        {
            VP8D_CONFIG oxcf;
            struct VP8D_COMP* optr;

            oxcf.Width = ctx->si.w;
            oxcf.Height = ctx->si.h;
            oxcf.Version = 9;
            oxcf.postprocess = 0;
            oxcf.max_threads = ctx->cfg.threads;
            oxcf.error_concealment =
                    (ctx->base.init_flags & VPX_CODEC_USE_ERROR_CONCEALMENT);
            oxcf.input_fragments =
                    (ctx->base.init_flags & VPX_CODEC_USE_INPUT_FRAGMENTS);

            optr = vp8dx_create_decompressor(&oxcf);

            /* If postprocessing was enabled by the application and a
             * configuration has not been provided, default it.
             */
            if (!ctx->postproc_cfg_set
                && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
            {
                ctx->postproc_cfg.post_proc_flag =
                    VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE;
                ctx->postproc_cfg.deblocking_level = 4;
                ctx->postproc_cfg.noise_level = 0;
            }

            if (!optr)
                res = VPX_CODEC_ERROR;
            else
                ctx->pbi = optr;
        }

        ctx->decoder_init = 1;
    }

    if (!res && ctx->pbi)
    {
        if (resolution_change)
        {
            VP8D_COMP *pbi = ctx->pbi;
            VP8_COMMON *const pc = & pbi->common;
            MACROBLOCKD *const xd  = & pbi->mb;
#if CONFIG_MULTITHREAD
            int i;
#endif
            pc->Width = ctx->si.w;
            pc->Height = ctx->si.h;
            {
                int prev_mb_rows = pc->mb_rows;

                if (setjmp(pbi->common.error.jmp))
                {
                    pbi->common.error.setjmp = 0;
                    /* same return value as used in vp8dx_receive_compressed_data */
                    return -1;
                }

                pbi->common.error.setjmp = 1;

                if (pc->Width <= 0)
                {
                    pc->Width = w;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame width");
                }

                if (pc->Height <= 0)
                {
                    pc->Height = h;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame height");
                }

                if (vp8_alloc_frame_buffers(pc, pc->Width, pc->Height))
                    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                       "Failed to allocate frame buffers");

                xd->pre = pc->yv12_fb[pc->lst_fb_idx];
                xd->dst = pc->yv12_fb[pc->new_fb_idx];

#if CONFIG_MULTITHREAD
                for (i = 0; i < pbi->allocated_decoding_thread_count; i++)
                {
                    pbi->mb_row_di[i].mbd.dst = pc->yv12_fb[pc->new_fb_idx];
                    vp8_build_block_doffsets(&pbi->mb_row_di[i].mbd);
                }
#endif
                vp8_build_block_doffsets(&pbi->mb);

                /* allocate memory for last frame MODE_INFO array */
#if CONFIG_ERROR_CONCEALMENT

                if (pbi->ec_enabled)
                {
                    /* old prev_mip was released by vp8_de_alloc_frame_buffers()
                     * called in vp8_alloc_frame_buffers() */
                    pc->prev_mip = vpx_calloc(
                                       (pc->mb_cols + 1) * (pc->mb_rows + 1),
                                       sizeof(MODE_INFO));

                    if (!pc->prev_mip)
                    {
                        vp8_de_alloc_frame_buffers(pc);
                        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                           "Failed to allocate "
                                           "last frame MODE_INFO array");
                    }

                    pc->prev_mi = pc->prev_mip + pc->mode_info_stride + 1;

                    if (vp8_alloc_overlap_lists(pbi))
                        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                           "Failed to allocate overlap lists "
                                           "for error concealment");
                }

#endif

#if CONFIG_MULTITHREAD
                if (pbi->b_multithreaded_rd)
                    vp8mt_alloc_temp_buffers(pbi, pc->Width, prev_mb_rows);
#else
                (void)prev_mb_rows;
#endif
            }

            pbi->common.error.setjmp = 0;

            /* required to get past the first get_free_fb() call */
            ctx->pbi->common.fb_idx_ref_cnt[0] = 0;
        }

        ctx->user_priv = user_priv;
        if (vp8dx_receive_compressed_data(ctx->pbi, data_sz, data, deadline))
        {
            VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;
            res = update_error_state(ctx, &pbi->common.error);
        }
    }

    return res;
}

static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t  *ctx,
                                  vpx_codec_iter_t      *iter)
{
    vpx_image_t *img = NULL;

    /* iter acts as a flip-flop, so an image is only returned on the first
     * call to get_frame.
     */
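    /* Caller-side pattern (sketch, using the public libvpx API): after each
     * vpx_codec_decode() call,
     *
     *     vpx_codec_iter_t iter = NULL;
     *     vpx_image_t *img;
     *
     *     while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL)
     *         show_frame(img);   // hypothetical consumer
     *
     * VP8 produces at most one image per compressed frame, so the loop body
     * runs at most once per decode call here.
     */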
    if (!(*iter))
    {
        YV12_BUFFER_CONFIG sd;
        int64_t time_stamp = 0, time_end_stamp = 0;
        vp8_ppflags_t flags = {0};

        if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        {
            flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag
#if CONFIG_POSTPROC_VISUALIZER

                                | ((ctx->dbg_color_ref_frame_flag != 0) ? VP8D_DEBUG_CLR_FRM_REF_BLKS : 0)
                                | ((ctx->dbg_color_mb_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                | ((ctx->dbg_color_b_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                | ((ctx->dbg_display_mv_flag != 0) ? VP8D_DEBUG_DRAW_MV : 0)
#endif
                                ;
            flags.deblocking_level      = ctx->postproc_cfg.deblocking_level;
            flags.noise_level           = ctx->postproc_cfg.noise_level;
#if CONFIG_POSTPROC_VISUALIZER
            flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag;
            flags.display_mb_modes_flag  = ctx->dbg_color_mb_modes_flag;
            flags.display_b_modes_flag   = ctx->dbg_color_b_modes_flag;
            flags.display_mv_flag        = ctx->dbg_display_mv_flag;
#endif
        }

        if (0 == vp8dx_get_raw_frame(ctx->pbi, &sd, &time_stamp, &time_end_stamp, &flags))
        {
            yuvconfig2image(&ctx->img, &sd, ctx->user_priv);

            img = &ctx->img;
            *iter = img;
        }
    }

    return img;
}


static
vpx_codec_err_t vp8_xma_get_mmap(const vpx_codec_ctx_t      *ctx,
                                 vpx_codec_mmap_t           *mmap,
                                 vpx_codec_iter_t           *iter)
{
    vpx_codec_err_t     res;
    const mem_req_t  *seg_iter = *iter;

    /* Get address of next segment request */
    do
    {
        if (!seg_iter)
            seg_iter = vp8_mem_req_segs;
        else if (seg_iter->id != VP8_SEG_MAX)
            seg_iter++;

        *iter = (vpx_codec_iter_t)seg_iter;

        if (seg_iter->id != VP8_SEG_MAX)
        {
            mmap->id = seg_iter->id;
            mmap->sz = seg_iter->sz;
            mmap->align = seg_iter->align;
            mmap->flags = seg_iter->flags;

            if (!seg_iter->sz)
                mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);

            res = VPX_CODEC_OK;
        }
        else
            res = VPX_CODEC_LIST_END;
    }
    while (!mmap->sz && res != VPX_CODEC_LIST_END);

    return res;
}

static vpx_codec_err_t vp8_xma_set_mmap(vpx_codec_ctx_t         *ctx,
                                        const vpx_codec_mmap_t  *mmap)
{
    vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
    int i, done;

    if (!ctx->priv)
    {
        if (mmap->id == VP8_SEG_ALG_PRIV)
        {
            if (!ctx->priv)
            {
                vp8_init_ctx(ctx, mmap);
                res = VPX_CODEC_OK;
            }
        }
    }

    done = 1;

    if (!res && ctx->priv->alg_priv)
    {
        for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        {
            if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
                if (!ctx->priv->alg_priv->mmaps[i].base)
                {
                    ctx->priv->alg_priv->mmaps[i] = *mmap;
                    res = VPX_CODEC_OK;
                }

            done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
        }
    }

    if (done && !res)
    {
        vp8_finalize_mmaps(ctx->priv->alg_priv);
        res = ctx->iface->init(ctx, NULL);
    }

    return res;
}

static vpx_codec_err_t image2yuvconfig(const vpx_image_t   *img,
                                       YV12_BUFFER_CONFIG  *yv12)
{
    vpx_codec_err_t        res = VPX_CODEC_OK;
    yv12->y_buffer = img->planes[VPX_PLANE_Y];
    yv12->u_buffer = img->planes[VPX_PLANE_U];
    yv12->v_buffer = img->planes[VPX_PLANE_V];

    yv12->y_width  = img->d_w;
    yv12->y_height = img->d_h;
    yv12->uv_width = yv12->y_width / 2;
    yv12->uv_height = yv12->y_height / 2;

    yv12->y_stride = img->stride[VPX_PLANE_Y];
    yv12->uv_stride = img->stride[VPX_PLANE_U];

    yv12->border  = (img->stride[VPX_PLANE_Y] - img->d_w) / 2;
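    /* For example, a 640-pixel-wide image stored with a 704-pixel luma
     * stride implies a 32-pixel border on each side: (704 - 640) / 2 == 32.
     */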
    yv12->clrtype = (img->fmt == VPX_IMG_FMT_VPXI420 || img->fmt == VPX_IMG_FMT_VPXYV12);

    return res;
}


static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx,
        int ctr_id,
        va_list args)
{

    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

    if (data)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        return vp8dx_set_reference(ctx->pbi, frame->frame_type, &sd);
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx,
        int ctr_id,
        va_list args)
{

    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

    if (data)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        return vp8dx_get_reference(ctx->pbi, frame->frame_type, &sd);
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx,
                                        int ctr_id,
                                        va_list args)
{
#if CONFIG_POSTPROC
    vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);

    if (data)
    {
        ctx->postproc_cfg_set = 1;
        ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

#else
    return VPX_CODEC_INCAPABLE;
#endif
}
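
/* Application-side usage (sketch): the decoder must have been opened with
 * VPX_CODEC_USE_POSTPROC for this control to have a visible effect.
 *
 *     vp8_postproc_cfg_t pp;
 *
 *     pp.post_proc_flag = VP8_DEBLOCK | VP8_DEMACROBLOCK;
 *     pp.deblocking_level = 4;
 *     pp.noise_level = 0;
 *     vpx_codec_control(&codec, VP8_SET_POSTPROC, &pp);
 */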

static vpx_codec_err_t vp8_set_dbg_options(vpx_codec_alg_priv_t *ctx,
                                           int ctrl_id,
                                           va_list args)
{
#if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC
    int data = va_arg(args, int);

#define MAP(id, var) case id: var = data; break;

    switch (ctrl_id)
    {
        MAP (VP8_SET_DBG_COLOR_REF_FRAME,   ctx->dbg_color_ref_frame_flag);
        MAP (VP8_SET_DBG_COLOR_MB_MODES,    ctx->dbg_color_mb_modes_flag);
        MAP (VP8_SET_DBG_COLOR_B_MODES,     ctx->dbg_color_b_modes_flag);
        MAP (VP8_SET_DBG_DISPLAY_MV,        ctx->dbg_display_mv_flag);
    }

    return VPX_CODEC_OK;
#else
    return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t vp8_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
                                                int ctrl_id,
                                                va_list args)
{
    int *update_info = va_arg(args, int *);
    VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;

    if (update_info)
    {
        *update_info = pbi->common.refresh_alt_ref_frame * (int) VP8_ALTR_FRAME
            + pbi->common.refresh_golden_frame * (int) VP8_GOLD_FRAME
            + pbi->common.refresh_last_frame * (int) VP8_LAST_FRAME;

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;
}

extern int vp8dx_references_buffer( VP8_COMMON *oci, int ref_frame );
static vpx_codec_err_t vp8_get_last_ref_frame(vpx_codec_alg_priv_t *ctx,
                                              int ctrl_id,
                                              va_list args)
{
    int *ref_info = va_arg(args, int *);
    VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;
    VP8_COMMON *oci = &pbi->common;

    if (ref_info)
    {
        *ref_info =
            (vp8dx_references_buffer( oci, ALTREF_FRAME )?VP8_ALTR_FRAME:0) |
            (vp8dx_references_buffer( oci, GOLDEN_FRAME )?VP8_GOLD_FRAME:0) |
            (vp8dx_references_buffer( oci, LAST_FRAME )?VP8_LAST_FRAME:0);

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t vp8_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
                                               int ctrl_id,
                                               va_list args)
{

    int *corrupted = va_arg(args, int *);

    if (corrupted)
    {
        VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;
        *corrupted = pbi->common.frame_to_show->corrupted;

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] =
{
    {VP8_SET_REFERENCE,             vp8_set_reference},
    {VP8_COPY_REFERENCE,            vp8_get_reference},
    {VP8_SET_POSTPROC,              vp8_set_postproc},
    {VP8_SET_DBG_COLOR_REF_FRAME,   vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_MB_MODES,    vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_B_MODES,     vp8_set_dbg_options},
    {VP8_SET_DBG_DISPLAY_MV,        vp8_set_dbg_options},
    {VP8D_GET_LAST_REF_UPDATES,     vp8_get_last_ref_updates},
    {VP8D_GET_FRAME_CORRUPTED,      vp8_get_frame_corrupted},
    {VP8D_GET_LAST_REF_USED,        vp8_get_last_ref_frame},
    { -1, NULL},
};
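
/* Example control call from the application side (sketch):
 *
 *     int corrupted = 0;
 *
 *     if (!vpx_codec_control(&codec, VP8D_GET_FRAME_CORRUPTED, &corrupted))
 *         printf("last frame corrupted: %d\n", corrupted);
 */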


#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp8_dx) =
{
    "WebM Project VP8 Decoder" VERSION_STRING,
    VPX_CODEC_INTERNAL_ABI_VERSION,
    VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC | VP8_CAP_ERROR_CONCEALMENT |
    VPX_CODEC_CAP_INPUT_FRAGMENTS,
    /* vpx_codec_caps_t          caps; */
    vp8_init,         /* vpx_codec_init_fn_t       init; */
    vp8_destroy,      /* vpx_codec_destroy_fn_t    destroy; */
    vp8_ctf_maps,     /* vpx_codec_ctrl_fn_map_t  *ctrl_maps; */
    vp8_xma_get_mmap, /* vpx_codec_get_mmap_fn_t   get_mmap; */
    vp8_xma_set_mmap, /* vpx_codec_set_mmap_fn_t   set_mmap; */
    {
        vp8_peek_si,      /* vpx_codec_peek_si_fn_t    peek_si; */
        vp8_get_si,       /* vpx_codec_get_si_fn_t     get_si; */
        vp8_decode,       /* vpx_codec_decode_fn_t     decode; */
        vp8_get_frame,    /* vpx_codec_frame_get_fn_t  frame_get; */
    },
    { /* encoder functions */
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED
    }
};