/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#include <stdlib.h>
#include <string.h>
#include "vp8_rtcd.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "vpx_version.h"
#include "common/onyxd.h"
#include "decoder/onyxd_int.h"
#include "common/alloccommon.h"
#include "vpx_mem/vpx_mem.h"
#if CONFIG_ERROR_CONCEALMENT
#include "decoder/error_concealment.h"
#endif
#include "decoder/decoderthreading.h"

#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
#define VP8_CAP_ERROR_CONCEALMENT (CONFIG_ERROR_CONCEALMENT ? \
                                    VPX_CODEC_CAP_ERROR_CONCEALMENT : 0)

typedef vpx_codec_stream_info_t  vp8_stream_info_t;

/* Structures for handling memory allocations */
typedef enum
{
    VP8_SEG_ALG_PRIV     = 256,
    VP8_SEG_MAX
} mem_seg_id_t;
#define NELEMENTS(x) ((int)(sizeof(x)/sizeof(x[0])))

static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t);

static const mem_req_t vp8_mem_req_segs[] =
{
    {VP8_SEG_ALG_PRIV,    0, 8, VPX_CODEC_MEM_ZERO, vp8_priv_sz},
    {VP8_SEG_MAX, 0, 0, 0, NULL}
};

struct vpx_codec_alg_priv
{
    vpx_codec_priv_t        base;
    vpx_codec_mmap_t        mmaps[NELEMENTS(vp8_mem_req_segs)-1];
    vpx_codec_dec_cfg_t     cfg;
    vp8_stream_info_t       si;
    int                     defer_alloc;
    int                     decoder_init;
    int                     postproc_cfg_set;
    vp8_postproc_cfg_t      postproc_cfg;
#if CONFIG_POSTPROC_VISUALIZER
    unsigned int            dbg_postproc_flag;
    int                     dbg_color_ref_frame_flag;
    int                     dbg_color_mb_modes_flag;
    int                     dbg_color_b_modes_flag;
    int                     dbg_display_mv_flag;
#endif
    vp8_decrypt_cb          *decrypt_cb;
    void                    *decrypt_state;
    vpx_image_t             img;
    int                     img_setup;
    struct frame_buffers    yv12_frame_buffers;
    void                    *user_priv;
    FRAGMENT_DATA           fragments;
};

static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t flags)
{
    /* Although this declaration is constant, we can't use it in the requested
     * segments list because we want to define the requested segments list
     * before defining the private type (so that the number of memory maps is
     * known)
     */
    (void)si;
    (void)flags;
    return sizeof(vpx_codec_alg_priv_t);
}

static void vp8_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap)
{
    int i;

    ctx->priv = mmap->base;
    ctx->priv->sz = sizeof(*ctx->priv);
    ctx->priv->iface = ctx->iface;
    ctx->priv->alg_priv = mmap->base;

    for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        ctx->priv->alg_priv->mmaps[i].id = vp8_mem_req_segs[i].id;

    ctx->priv->alg_priv->mmaps[0] = *mmap;
    ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si);
    ctx->priv->alg_priv->decrypt_cb = NULL;
    ctx->priv->alg_priv->decrypt_state = NULL;
    ctx->priv->init_flags = ctx->init_flags;

    if (ctx->config.dec)
    {
        /* Update the reference to the config structure to an internal copy. */
        ctx->priv->alg_priv->cfg = *ctx->config.dec;
        ctx->config.dec = &ctx->priv->alg_priv->cfg;
    }
}

static void vp8_finalize_mmaps(vpx_codec_alg_priv_t *ctx)
{
    (void)ctx;
    /* nothing to clean up */
}

static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx,
                                vpx_codec_priv_enc_mr_cfg_t *data)
{
    vpx_codec_err_t        res = VPX_CODEC_OK;
    (void) data;

    vp8_rtcd();

    /* This function only allocates space for the vpx_codec_alg_priv_t
     * structure. More memory may be required at the time the stream
     * information becomes known.
     */
    if (!ctx->priv)
    {
        vpx_codec_mmap_t mmap;

        mmap.id = vp8_mem_req_segs[0].id;
        mmap.sz = sizeof(vpx_codec_alg_priv_t);
        mmap.align = vp8_mem_req_segs[0].align;
        mmap.flags = vp8_mem_req_segs[0].flags;

        res = vpx_mmap_alloc(&mmap);
        if (res != VPX_CODEC_OK) return res;

        vp8_init_ctx(ctx, &mmap);

        /* initialize number of fragments to zero */
        ctx->priv->alg_priv->fragments.count = 0;
        /* are input fragments enabled? */
        ctx->priv->alg_priv->fragments.enabled =
                (ctx->priv->alg_priv->base.init_flags &
                    VPX_CODEC_USE_INPUT_FRAGMENTS);

        ctx->priv->alg_priv->defer_alloc = 1;
        /* post-processing level initialized to do nothing */
    }

    ctx->priv->alg_priv->yv12_frame_buffers.use_frame_threads =
            (ctx->priv->alg_priv->base.init_flags &
                    VPX_CODEC_USE_FRAME_THREADING);

    /* for now, disable frame threading */
    ctx->priv->alg_priv->yv12_frame_buffers.use_frame_threads = 0;

    if (ctx->priv->alg_priv->yv12_frame_buffers.use_frame_threads &&
            (( ctx->priv->alg_priv->base.init_flags &
                            VPX_CODEC_USE_ERROR_CONCEALMENT)
                    || ( ctx->priv->alg_priv->base.init_flags &
                            VPX_CODEC_USE_INPUT_FRAGMENTS) ) )
    {
        /* row-based threading, error concealment, and input fragments will
         * not be supported when using frame-based threading */
        res = VPX_CODEC_INVALID_PARAM;
    }

    return res;
}

static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx)
{
    int i;

    vp8_remove_decoder_instances(&ctx->yv12_frame_buffers);

    for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--)
    {
        if (ctx->mmaps[i].dtor)
            ctx->mmaps[i].dtor(&ctx->mmaps[i]);
    }

    return VPX_CODEC_OK;
}

static vpx_codec_err_t vp8_peek_si_internal(const uint8_t *data,
                                            unsigned int data_sz,
                                            vpx_codec_stream_info_t *si,
                                            vp8_decrypt_cb *decrypt_cb,
                                            void *decrypt_state)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    /* Reject empty buffers and sizes that would wrap the data pointer. */
    if (data + data_sz <= data)
    {
        res = VPX_CODEC_INVALID_PARAM;
    }
    else
    {
        /* Parse the uncompressed part of the key frame header.
         * 3 bytes: including version, frame type and the first partition size
         * 3 bytes: sync code (0x9d, 0x01, 0x2a)
         * 4 bytes: including image width and height in the lowest 14 bits
         *          of each 2-byte value.
         */
        uint8_t clear_buffer[10];
        const uint8_t *clear = data;
        if (decrypt_cb)
        {
            int n = data_sz > 10 ? 10 : data_sz;
            decrypt_cb(decrypt_state, data, clear_buffer, n);
            clear = clear_buffer;
        }
        si->is_kf = 0;

        if (data_sz >= 10 && !(clear[0] & 0x01))  /* I-Frame */
        {
            si->is_kf = 1;

            /* vet via sync code */
            if (clear[3] != 0x9d || clear[4] != 0x01 || clear[5] != 0x2a)
                res = VPX_CODEC_UNSUP_BITSTREAM;

            si->w = (clear[6] | (clear[7] << 8)) & 0x3fff;
            si->h = (clear[8] | (clear[9] << 8)) & 0x3fff;

            /*printf("w=%d, h=%d\n", si->w, si->h);*/
            if (!(si->h | si->w))
                res = VPX_CODEC_UNSUP_BITSTREAM;
        }
        else
        {
            res = VPX_CODEC_UNSUP_BITSTREAM;
        }
    }

    return res;
}

static vpx_codec_err_t vp8_peek_si(const uint8_t *data,
                                   unsigned int data_sz,
                                   vpx_codec_stream_info_t *si) {
    return vp8_peek_si_internal(data, data_sz, si, NULL, NULL);
}

static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t    *ctx,
                                  vpx_codec_stream_info_t *si)
{

    unsigned int sz;

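    /* Copy only as much as the caller's structure can hold. */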
    if (si->sz >= sizeof(vp8_stream_info_t))
        sz = sizeof(vp8_stream_info_t);
    else
        sz = sizeof(vpx_codec_stream_info_t);

    memcpy(si, &ctx->si, sz);
    si->sz = sz;

    return VPX_CODEC_OK;
}


static vpx_codec_err_t
update_error_state(vpx_codec_alg_priv_t                 *ctx,
                   const struct vpx_internal_error_info *error)
{
    vpx_codec_err_t res;

    if ((res = error->error_code))
        ctx->base.err_detail = error->has_detail
                               ? error->detail
                               : NULL;

    return res;
}

static void yuvconfig2image(vpx_image_t               *img,
                            const YV12_BUFFER_CONFIG  *yv12,
                            void                      *user_priv)
{
    /** vpx_img_wrap() doesn't allow specifying independent strides for
      * the Y, U, and V planes, nor other alignment adjustments that
      * might be representable by a YV12_BUFFER_CONFIG, so we just
      * initialize all the fields. */
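    /* w/h describe the full allocated buffer (stride and border-padded
     * height); d_w/d_h give the displayed frame size. */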
    img->fmt = VPX_IMG_FMT_I420;
    img->w = yv12->y_stride;
    img->h = (yv12->y_height + 2 * VP8BORDERINPIXELS + 15) & ~15;
    img->d_w = yv12->y_width;
    img->d_h = yv12->y_height;
    img->x_chroma_shift = 1;
    img->y_chroma_shift = 1;
    img->planes[VPX_PLANE_Y] = yv12->y_buffer;
    img->planes[VPX_PLANE_U] = yv12->u_buffer;
    img->planes[VPX_PLANE_V] = yv12->v_buffer;
    img->planes[VPX_PLANE_ALPHA] = NULL;
    img->stride[VPX_PLANE_Y] = yv12->y_stride;
    img->stride[VPX_PLANE_U] = yv12->uv_stride;
    img->stride[VPX_PLANE_V] = yv12->uv_stride;
    img->stride[VPX_PLANE_ALPHA] = yv12->y_stride;
    img->bps = 12;
    img->user_priv = user_priv;
    img->img_data = yv12->buffer_alloc;
    img->img_data_owner = 0;
    img->self_allocd = 0;
}

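/* Collects input fragments for the current frame. Returns 1 when a complete
 * frame is ready to decode, 0 when more fragments are still expected, and
 * -1 (with *res set) on error.
 */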
static int
update_fragments(vpx_codec_alg_priv_t  *ctx,
                 const uint8_t         *data,
                 unsigned int           data_sz,
                 vpx_codec_err_t       *res)
{
    *res = VPX_CODEC_OK;

    if (ctx->fragments.count == 0)
    {
        /* New frame, reset fragment pointers and sizes */
        vpx_memset((void*)ctx->fragments.ptrs, 0, sizeof(ctx->fragments.ptrs));
        vpx_memset(ctx->fragments.sizes, 0, sizeof(ctx->fragments.sizes));
    }
    if (ctx->fragments.enabled && !(data == NULL && data_sz == 0))
    {
        /* Store a pointer to this fragment and return. The complete frame
         * has not been received yet, so decoding is deferred.
         */
        ctx->fragments.ptrs[ctx->fragments.count] = data;
        ctx->fragments.sizes[ctx->fragments.count] = data_sz;
        ctx->fragments.count++;
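        /* Guard the fragment arrays: a frame consists of at most
         * (1 << EIGHT_PARTITION) token partitions plus the first partition. */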
        if (ctx->fragments.count > (1 << EIGHT_PARTITION) + 1)
        {
            ctx->fragments.count = 0;
            *res = VPX_CODEC_INVALID_PARAM;
            return -1;
        }
        return 0;
    }

    if (!ctx->fragments.enabled)
    {
        ctx->fragments.ptrs[0] = data;
        ctx->fragments.sizes[0] = data_sz;
        ctx->fragments.count = 1;
    }

    return 1;
}

static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t  *ctx,
                                  const uint8_t         *data,
                                  unsigned int            data_sz,
                                  void                    *user_priv,
                                  long                    deadline)
{
    vpx_codec_err_t res = VPX_CODEC_OK;
    unsigned int resolution_change = 0;
    unsigned int w, h;


    /* Update the input fragment data */
    if (update_fragments(ctx, data, data_sz, &res) <= 0)
        return res;

    /* Determine the stream parameters. Note that we rely on peek_si to
     * validate that we have a buffer that does not wrap around the top
     * of the heap.
     */
    w = ctx->si.w;
    h = ctx->si.h;

    res = vp8_peek_si_internal(ctx->fragments.ptrs[0], ctx->fragments.sizes[0],
                               &ctx->si, ctx->decrypt_cb, ctx->decrypt_state);

    if ((res == VPX_CODEC_UNSUP_BITSTREAM) && !ctx->si.is_kf)
    {
        /* the peek function returns an error for non key frames; in this
         * case it is not an error */
        res = VPX_CODEC_OK;
    }

    if (!ctx->decoder_init && !ctx->si.is_kf)
        res = VPX_CODEC_UNSUP_BITSTREAM;

    if ((ctx->si.h != h) || (ctx->si.w != w))
        resolution_change = 1;

    /* Perform deferred allocations, if required */
    if (!res && ctx->defer_alloc)
    {
        int i;

        for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = ctx->si.w;
            cfg.h = ctx->si.h;
            ctx->mmaps[i].id = vp8_mem_req_segs[i].id;
            ctx->mmaps[i].sz = vp8_mem_req_segs[i].sz;
            ctx->mmaps[i].align = vp8_mem_req_segs[i].align;
            ctx->mmaps[i].flags = vp8_mem_req_segs[i].flags;

            if (!ctx->mmaps[i].sz)
                ctx->mmaps[i].sz = vp8_mem_req_segs[i].calc_sz(&cfg,
                                   ctx->base.init_flags);

            res = vpx_mmap_alloc(&ctx->mmaps[i]);
        }

        if (!res)
            vp8_finalize_mmaps(ctx);

        ctx->defer_alloc = 0;
    }

    /* Initialize the decoder instance on the first frame */
    if (!res && !ctx->decoder_init)
    {
        res = vpx_validate_mmaps(&ctx->si, ctx->mmaps,
                                 vp8_mem_req_segs, NELEMENTS(vp8_mem_req_segs),
                                 ctx->base.init_flags);

        if (!res)
        {
            VP8D_CONFIG oxcf;

            oxcf.Width = ctx->si.w;
            oxcf.Height = ctx->si.h;
            oxcf.Version = 9;
            oxcf.postprocess = 0;
            oxcf.max_threads = ctx->cfg.threads;
            oxcf.error_concealment =
                    (ctx->base.init_flags & VPX_CODEC_USE_ERROR_CONCEALMENT);

            /* If postprocessing was enabled by the application and a
             * configuration has not been provided, default it.
             */
            if (!ctx->postproc_cfg_set
                && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
            {
                ctx->postproc_cfg.post_proc_flag =
                    VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE;
                ctx->postproc_cfg.deblocking_level = 4;
                ctx->postproc_cfg.noise_level = 0;
            }

            res = vp8_create_decoder_instances(&ctx->yv12_frame_buffers, &oxcf);
            ctx->yv12_frame_buffers.pbi[0]->decrypt_cb = ctx->decrypt_cb;
            ctx->yv12_frame_buffers.pbi[0]->decrypt_state = ctx->decrypt_state;
        }

        ctx->decoder_init = 1;
    }

    if (!res)
    {
        VP8D_COMP *pbi = ctx->yv12_frame_buffers.pbi[0];
        if (resolution_change)
        {
            VP8_COMMON *const pc = & pbi->common;
            MACROBLOCKD *const xd  = & pbi->mb;
#if CONFIG_MULTITHREAD
            int i;
#endif
            pc->Width = ctx->si.w;
            pc->Height = ctx->si.h;
            {
                int prev_mb_rows = pc->mb_rows;

                if (setjmp(pbi->common.error.jmp))
                {
                    pbi->common.error.setjmp = 0;
                    /* same return value as used in vp8dx_receive_compressed_data */
                    return -1;
                }

                pbi->common.error.setjmp = 1;

                if (pc->Width <= 0)
                {
                    pc->Width = w;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame width");
                }

                if (pc->Height <= 0)
                {
                    pc->Height = h;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame height");
                }

                if (vp8_alloc_frame_buffers(pc, pc->Width, pc->Height))
                    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                       "Failed to allocate frame buffers");

                xd->pre = pc->yv12_fb[pc->lst_fb_idx];
                xd->dst = pc->yv12_fb[pc->new_fb_idx];

#if CONFIG_MULTITHREAD
                for (i = 0; i < pbi->allocated_decoding_thread_count; i++)
                {
                    pbi->mb_row_di[i].mbd.dst = pc->yv12_fb[pc->new_fb_idx];
                    vp8_build_block_doffsets(&pbi->mb_row_di[i].mbd);
                }
#endif
                vp8_build_block_doffsets(&pbi->mb);

                /* allocate memory for last frame MODE_INFO array */
#if CONFIG_ERROR_CONCEALMENT

                if (pbi->ec_enabled)
                {
                    /* old prev_mip was released by vp8_de_alloc_frame_buffers()
                     * called in vp8_alloc_frame_buffers() */
                    pc->prev_mip = vpx_calloc(
                                       (pc->mb_cols + 1) * (pc->mb_rows + 1),
                                       sizeof(MODE_INFO));

                    if (!pc->prev_mip)
                    {
                        vp8_de_alloc_frame_buffers(pc);
                        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
531                                           "Failed to allocate"
532                                           "last frame MODE_INFO array");
                    }

                    pc->prev_mi = pc->prev_mip + pc->mode_info_stride + 1;

                    if (vp8_alloc_overlap_lists(pbi))
                        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                           "Failed to allocate overlap lists "
                                           "for error concealment");
                }

#endif

#if CONFIG_MULTITHREAD
                if (pbi->b_multithreaded_rd)
                    vp8mt_alloc_temp_buffers(pbi, pc->Width, prev_mb_rows);
#else
                (void)prev_mb_rows;
#endif
            }

            pbi->common.error.setjmp = 0;

            /* required to get past the first get_free_fb() call */
            pbi->common.fb_idx_ref_cnt[0] = 0;
        }

        /* update the pbi fragment data */
        pbi->fragments = ctx->fragments;

        ctx->user_priv = user_priv;
        if (vp8dx_receive_compressed_data(pbi, data_sz, data, deadline))
        {
            res = update_error_state(ctx, &pbi->common.error);
        }

        /* get ready for the next series of fragments */
        ctx->fragments.count = 0;
    }

    return res;
}

static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t  *ctx,
                                  vpx_codec_iter_t      *iter)
{
    vpx_image_t *img = NULL;

    /* iter acts as a flip flop, so an image is only returned on the first
     * call to get_frame.
     */
    if (!(*iter) && ctx->yv12_frame_buffers.pbi[0])
    {
        YV12_BUFFER_CONFIG sd;
        int64_t time_stamp = 0, time_end_stamp = 0;
        vp8_ppflags_t flags = {0};

        if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        {
            flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag
#if CONFIG_POSTPROC_VISUALIZER

                                | ((ctx->dbg_color_ref_frame_flag != 0) ? VP8D_DEBUG_CLR_FRM_REF_BLKS : 0)
                                | ((ctx->dbg_color_mb_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                | ((ctx->dbg_color_b_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                | ((ctx->dbg_display_mv_flag != 0) ? VP8D_DEBUG_DRAW_MV : 0)
#endif
                                ;
            flags.deblocking_level      = ctx->postproc_cfg.deblocking_level;
            flags.noise_level           = ctx->postproc_cfg.noise_level;
#if CONFIG_POSTPROC_VISUALIZER
            flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag;
            flags.display_mb_modes_flag  = ctx->dbg_color_mb_modes_flag;
            flags.display_b_modes_flag   = ctx->dbg_color_b_modes_flag;
            flags.display_mv_flag        = ctx->dbg_display_mv_flag;
#endif
        }

        if (0 == vp8dx_get_raw_frame(ctx->yv12_frame_buffers.pbi[0], &sd,
                                     &time_stamp, &time_end_stamp, &flags))
        {
            yuvconfig2image(&ctx->img, &sd, ctx->user_priv);

            img = &ctx->img;
            *iter = img;
        }
    }

    return img;
}


static
vpx_codec_err_t vp8_xma_get_mmap(const vpx_codec_ctx_t      *ctx,
                                 vpx_codec_mmap_t           *mmap,
                                 vpx_codec_iter_t           *iter)
{
    vpx_codec_err_t     res;
    const mem_req_t  *seg_iter = *iter;

    /* Get address of next segment request */
    do
    {
        if (!seg_iter)
            seg_iter = vp8_mem_req_segs;
        else if (seg_iter->id != VP8_SEG_MAX)
            seg_iter++;

        *iter = (vpx_codec_iter_t)seg_iter;

        if (seg_iter->id != VP8_SEG_MAX)
        {
            mmap->id = seg_iter->id;
            mmap->sz = seg_iter->sz;
            mmap->align = seg_iter->align;
            mmap->flags = seg_iter->flags;

            if (!seg_iter->sz)
                mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);

            res = VPX_CODEC_OK;
        }
        else
            res = VPX_CODEC_LIST_END;
    }
    while (!mmap->sz && res != VPX_CODEC_LIST_END);

    return res;
}

static vpx_codec_err_t vp8_xma_set_mmap(vpx_codec_ctx_t         *ctx,
                                        const vpx_codec_mmap_t  *mmap)
{
    vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
    int i, done;

    if (!ctx->priv)
    {
        if (mmap->id == VP8_SEG_ALG_PRIV)
        {
            if (!ctx->priv)
            {
                vp8_init_ctx(ctx, mmap);
                res = VPX_CODEC_OK;
            }
        }
    }

    done = 1;

    if (!res && ctx->priv->alg_priv)
    {
        for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        {
            if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
                if (!ctx->priv->alg_priv->mmaps[i].base)
                {
                    ctx->priv->alg_priv->mmaps[i] = *mmap;
                    res = VPX_CODEC_OK;
                }

            done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
        }
    }

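    /* Once every segment has been supplied, finalize the maps and run the
     * normal init path. */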
    if (done && !res)
    {
        vp8_finalize_mmaps(ctx->priv->alg_priv);
        res = ctx->iface->init(ctx, NULL);
    }

    return res;
}

static vpx_codec_err_t image2yuvconfig(const vpx_image_t   *img,
                                       YV12_BUFFER_CONFIG  *yv12)
{
    vpx_codec_err_t        res = VPX_CODEC_OK;
    yv12->y_buffer = img->planes[VPX_PLANE_Y];
    yv12->u_buffer = img->planes[VPX_PLANE_U];
    yv12->v_buffer = img->planes[VPX_PLANE_V];

    yv12->y_crop_width  = img->d_w;
    yv12->y_crop_height = img->d_h;
    yv12->y_width  = img->d_w;
    yv12->y_height = img->d_h;
    yv12->uv_width = yv12->y_width / 2;
    yv12->uv_height = yv12->y_height / 2;

    yv12->y_stride = img->stride[VPX_PLANE_Y];
    yv12->uv_stride = img->stride[VPX_PLANE_U];

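    /* Estimate the border from the stride: half of the total horizontal
     * padding around the displayed area. */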
    yv12->border  = (img->stride[VPX_PLANE_Y] - img->d_w) / 2;
    return res;
}


static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx,
        int ctr_id,
        va_list args)
{

    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
    (void)ctr_id;
    if (data && !ctx->yv12_frame_buffers.use_frame_threads)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        return vp8dx_set_reference(ctx->yv12_frame_buffers.pbi[0],
                                   frame->frame_type, &sd);
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx,
        int ctr_id,
        va_list args)
{

    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
    (void)ctr_id;
    if (data && !ctx->yv12_frame_buffers.use_frame_threads)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        return vp8dx_get_reference(ctx->yv12_frame_buffers.pbi[0],
                                   frame->frame_type, &sd);
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx,
                                        int ctr_id,
                                        va_list args)
{
    (void)ctr_id;
#if CONFIG_POSTPROC
    vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);

    if (data)
    {
        ctx->postproc_cfg_set = 1;
        ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

#else
    return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t vp8_set_dbg_options(vpx_codec_alg_priv_t *ctx,
                                        int ctrl_id,
                                        va_list args)
{

    (void)ctrl_id;
    (void)ctx;
    (void)args;
#if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC
    int data = va_arg(args, int);

#define MAP(id, var) case id: var = data; break;

    switch (ctrl_id)
    {
        MAP (VP8_SET_DBG_COLOR_REF_FRAME,   ctx->dbg_color_ref_frame_flag);
        MAP (VP8_SET_DBG_COLOR_MB_MODES,    ctx->dbg_color_mb_modes_flag);
        MAP (VP8_SET_DBG_COLOR_B_MODES,     ctx->dbg_color_b_modes_flag);
        MAP (VP8_SET_DBG_DISPLAY_MV,        ctx->dbg_display_mv_flag);
    }

    return VPX_CODEC_OK;
#else
    return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t vp8_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
                                                int ctrl_id,
                                                va_list args)
{
    int *update_info = va_arg(args, int *);
    (void)ctrl_id;
    if (update_info && !ctx->yv12_frame_buffers.use_frame_threads)
    {
        VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];

        *update_info = pbi->common.refresh_alt_ref_frame * (int) VP8_ALTR_FRAME
            + pbi->common.refresh_golden_frame * (int) VP8_GOLD_FRAME
            + pbi->common.refresh_last_frame * (int) VP8_LAST_FRAME;

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;
}

extern int vp8dx_references_buffer( VP8_COMMON *oci, int ref_frame );
static vpx_codec_err_t vp8_get_last_ref_frame(vpx_codec_alg_priv_t *ctx,
                                              int ctrl_id,
                                              va_list args)
{
    int *ref_info = va_arg(args, int *);
    (void)ctrl_id;
    if (ref_info && !ctx->yv12_frame_buffers.use_frame_threads)
    {
        VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
        VP8_COMMON *oci = &pbi->common;
        *ref_info =
            (vp8dx_references_buffer( oci, ALTREF_FRAME )?VP8_ALTR_FRAME:0) |
            (vp8dx_references_buffer( oci, GOLDEN_FRAME )?VP8_GOLD_FRAME:0) |
            (vp8dx_references_buffer( oci, LAST_FRAME )?VP8_LAST_FRAME:0);

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t vp8_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
                                               int ctrl_id,
                                               va_list args)
{

    int *corrupted = va_arg(args, int *);
    (void)ctrl_id;
    VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];

    if (corrupted && pbi)
    {
        *corrupted = pbi->common.frame_to_show->corrupted;

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

static vpx_codec_err_t vp8_set_decryptor(vpx_codec_alg_priv_t *ctx,
                                         int ctrl_id,
                                         va_list args)
{
    vp8_decrypt_init *init = va_arg(args, vp8_decrypt_init *);
    (void)ctrl_id;
    if (init)
    {
        ctx->decrypt_cb = init->decrypt_cb;
        ctx->decrypt_state = init->decrypt_state;
    }
    else
    {
        ctx->decrypt_cb = NULL;
        ctx->decrypt_state = NULL;
    }
    return VPX_CODEC_OK;
}

vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] =
{
    {VP8_SET_REFERENCE,             vp8_set_reference},
    {VP8_COPY_REFERENCE,            vp8_get_reference},
    {VP8_SET_POSTPROC,              vp8_set_postproc},
    {VP8_SET_DBG_COLOR_REF_FRAME,   vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_MB_MODES,    vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_B_MODES,     vp8_set_dbg_options},
    {VP8_SET_DBG_DISPLAY_MV,        vp8_set_dbg_options},
    {VP8D_GET_LAST_REF_UPDATES,     vp8_get_last_ref_updates},
    {VP8D_GET_FRAME_CORRUPTED,      vp8_get_frame_corrupted},
    {VP8D_GET_LAST_REF_USED,        vp8_get_last_ref_frame},
    {VP8D_SET_DECRYPTOR,            vp8_set_decryptor},
    { -1, NULL},
};


#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp8_dx) =
{
    "WebM Project VP8 Decoder" VERSION_STRING,
    VPX_CODEC_INTERNAL_ABI_VERSION,
    VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC | VP8_CAP_ERROR_CONCEALMENT |
    VPX_CODEC_CAP_INPUT_FRAGMENTS,
    /* vpx_codec_caps_t          caps; */
    vp8_init,         /* vpx_codec_init_fn_t       init; */
    vp8_destroy,      /* vpx_codec_destroy_fn_t    destroy; */
    vp8_ctf_maps,     /* vpx_codec_ctrl_fn_map_t  *ctrl_maps; */
    vp8_xma_get_mmap, /* vpx_codec_get_mmap_fn_t   get_mmap; */
    vp8_xma_set_mmap, /* vpx_codec_set_mmap_fn_t   set_mmap; */
    {
        vp8_peek_si,      /* vpx_codec_peek_si_fn_t    peek_si; */
        vp8_get_si,       /* vpx_codec_get_si_fn_t     get_si; */
        vp8_decode,       /* vpx_codec_decode_fn_t     decode; */
        vp8_get_frame,    /* vpx_codec_frame_get_fn_t  frame_get; */
        NOT_IMPLEMENTED,
    },
    { /* encoder functions */
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
    }
};
