/*
* Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "VideoDecoderMPEG4.h"
#include "VideoDecoderTrace.h"
#include <string.h>

VideoDecoderMPEG4::VideoDecoderMPEG4(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_MPEG4),
      mLastVOPTimeIncrement(0),
      mExpectingNVOP(false),
      mSendIQMatrixBuf(false),
      mLastVOPCodingType(MP4_VOP_TYPE_I),
      mIsShortHeader(false) {
}

VideoDecoderMPEG4::~VideoDecoderMPEG4() {
    stop();
}

Decode_Status VideoDecoderMPEG4::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        return DECODE_SUCCESS;
    }

    vbp_data_mp42 *data = NULL;
    status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    status = startVA(data);
    return status;
}

void VideoDecoderMPEG4::stop(void) {
    // drop the last frame and ignore return value
    endDecodingFrame(true);
    VideoDecoderBase::stop();

    mLastVOPTimeIncrement = 0;
    mExpectingNVOP = false;
    mLastVOPCodingType = MP4_VOP_TYPE_I;
}

Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_mp42 *data = NULL;
    bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    if (buffer->flag & IS_SYNC_FRAME) {
        mIsSyncFrame = true;
    } else {
        mIsSyncFrame = false;
    }
    buffer->ext = NULL;
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (!mVAStarted) {
        status = startVA(data);
        CHECK_STATUS("startVA");
    }

    if (mSizeChanged && !useGraphicbuffer) {
        // some containers carry incorrect width/height;
        // send the format change to OMX so it can update the crop info.
        mSizeChanged = false;
        ITRACE("Video size changed during startVA");
        return DECODE_FORMAT_CHANGE;
    }

    if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
        mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
        data->codec_data.video_object_layer_width &&
        data->codec_data.video_object_layer_height) {
        // update encoded image size
        ITRACE("Video size changed from %dx%d to %dx%d\n", mVideoFormatInfo.width, mVideoFormatInfo.height,
            data->codec_data.video_object_layer_width, data->codec_data.video_object_layer_height);

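        // In store-metadata mode, update the format fields and the 'valid' flag under mFormatLock.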
        if (useGraphicbuffer && mStoreMetaData) {
            pthread_mutex_lock(&mFormatLock);
        }
        mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
        mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
        bool needFlush = false;
        if (useGraphicbuffer) {
            if (mStoreMetaData) {
                needFlush = true;

                mVideoFormatInfo.valid = false;
                pthread_mutex_unlock(&mFormatLock);
            } else {
                needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                         || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight);
            }
        }
        if (needFlush) {
            if (mStoreMetaData) {
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
            } else {
                flushSurfaceBuffers();
            }
            mSizeChanged = false;
            return DECODE_FORMAT_CHANGE;
        } else {
            mSizeChanged = true;
        }

        setRenderRect();
    } else {
        if (useGraphicbuffer && mStoreMetaData) {
            mVideoFormatInfo.valid = true;
        }
    }

    status = decodeFrame(buffer, data);
    CHECK_STATUS("decodeFrame");

    return status;
}

void VideoDecoderMPEG4::flush(void) {
    VideoDecoderBase::flush();

    mExpectingNVOP = false;
    mLastVOPTimeIncrement = 0;
    mLastVOPCodingType = MP4_VOP_TYPE_I;
}

Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data) {
    Decode_Status status;
    // check if any slice is parsed; we may have received only configuration data
    if (data->number_picture_data == 0) {
        WTRACE("number_picture_data == 0");
        return DECODE_SUCCESS;
    }
    if (data->picture_data && (data->picture_data->picture_param.vop_width == 0 || data->picture_data->picture_param.vop_height == 0)) {
        if (!data->codec_data.got_vol && data->codec_data.got_vop) {
            // error resilience when the VOL header is missing
            data->picture_data->picture_param.vop_width = mVideoFormatInfo.width;
            data->picture_data->picture_param.vop_height = mVideoFormatInfo.height;
        } else {
            return DECODE_PARSER_FAIL;
        }
    }

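    // Slices of one frame arrive with the same timestamp, so a change in timestamp marks
    // the boundary between the previous frame and a new one.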
    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;

    if (lastPTS != mCurrentPTS) {
        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        // start decoding a new frame
        status = beginDecodingFrame(data);
        if (status == DECODE_MULTIPLE_FRAME) {
            buffer->ext = &mExtensionBuffer;
            mExtensionBuffer.extType = PACKED_FRAME_TYPE;
            mExtensionBuffer.extSize = sizeof(mPackedFrame);
            mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
        } else if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
        }
        CHECK_STATUS("beginDecodingFrame");
    } else {
        status = continueDecodingFrame(data);
        if (status == DECODE_MULTIPLE_FRAME) {
            buffer->ext = &mExtensionBuffer;
            mExtensionBuffer.extType = PACKED_FRAME_TYPE;
            mExtensionBuffer.extSize = sizeof(mPackedFrame);
            mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
        } else if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
        }
        CHECK_STATUS("continueDecodingFrame");
    }

    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }

    return DECODE_SUCCESS;
}

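// Start decoding a frame: validate the packed-frame (N-VOP) state, handle non-coded VOPs,
// check that the required reference frames are available, then decode the picture data
// through continueDecodingFrame().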
Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) {

    Decode_Status status = DECODE_SUCCESS;
    vbp_picture_data_mp42 *picData = data->picture_data;
    VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
    int codingType = picParam->vop_fields.bits.vop_coding_type;

    // start sanity checking
    if (mExpectingNVOP) {
        // if we are waiting for the N-VOP of a packed frame and the new frame is coded,
        // its coding type must be B
        // for example: {PB} B N P B B P...
        if (picData->vop_coded == 1 && codingType != MP4_VOP_TYPE_B) {
            WTRACE("Invalid coding type while waiting for n-vop for packed frame.");
            mExpectingNVOP = false;
        }
    }

    // handle N-VOP picture; it could be a skipped frame or a simple placeholder for a packed frame
    if (picData->vop_coded == 0) {
        if (mLastReference == NULL) {
            WTRACE("The last reference is unavailable to construct skipped frame.");
            flush();
            mExpectingNVOP = false;
            // TODO: handle this case
            return DECODE_SUCCESS;
        }

        if (mExpectingNVOP) {
            // P frame is already in the queue, just need to update its time stamp.
            mLastReference->renderBuffer.timeStamp = mCurrentPTS;
            mExpectingNVOP = false;
        }
        else {
            // Do nothing for a skipped frame as the last frame will be rendered again natively.
            // No need to handle the reference frame either.
#if 0
            // this is a skipped frame, use the last reference frame as output
            status = acquireSurfaceBuffer();
            CHECK_STATUS("acquireSurfaceBuffer");
            mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
            mAcquiredBuffer->renderBuffer.flag = 0;
            mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
            mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
            // No need to update mappedData for HW decoding
            //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
            mAcquiredBuffer->referenceFrame = true;
            status = outputSurfaceBuffer();
            CHECK_STATUS("outputSurfaceBuffer");
#endif
        }

        if (data->number_picture_data > 1) {
            WTRACE("Unexpected to have more picture data following a non-coded VOP.");
            // picture data is thrown away. No issue if the picture data is for an N-VOP;
            // if it is for a coded picture, a frame is lost.
            // TODO: handle this case
            // return DECODE_FAIL;
        }
        return DECODE_SUCCESS;
    }
    else {
        // Check if we have reference frame(s) for decoding
        if (codingType == MP4_VOP_TYPE_B) {
            if (mForwardReference == NULL ||
                mLastReference == NULL) {
                if (mIsShortHeader) {
                    status = DECODE_SUCCESS;
                    VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
                } else
                    return DECODE_NO_REFERENCE;
            }
        } else if (codingType == MP4_VOP_TYPE_P || codingType == MP4_VOP_TYPE_S) {
            if (mLastReference == NULL && mIsSyncFrame == false) {
                if (mIsShortHeader) {
                    status = DECODE_SUCCESS;
                    VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
                } else
                    return DECODE_NO_REFERENCE;
            }
        }
        // all sanity checks pass; continue decoding through continueDecodingFrame
        status = continueDecodingFrame(data);
    }
    return status;
}


Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
    Decode_Status status = DECODE_SUCCESS;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;

    /*
         Packed Frame Assumptions:

         1. A packed frame contains exactly one P or I frame and one B frame.
         2. A packed frame contains no skipped frame (vop_coded = 0).
         3. Each packed frame is followed by one N-VOP frame (not necessarily immediately).
         4. The N-VOP frame is the frame with vop_coded = 0.
         5. The timestamp of the N-VOP frame is used for the P or I frame in the packed frame.


         I, P, {P, B}, B, N, P, N, I, ...
         I, P, {P, B}, N, P, N, I, ...

         The first N is a placeholder for the P frame in the packed frame.
         The second N is a skipped frame.
         */

    vbp_picture_data_mp42 *picData = data->picture_data;
    for (uint32_t i = 0; i < data->number_picture_data; i++, picData = picData->next_picture_data) {
        // each slice has its own picture data; a video_packet_header following a resync_marker may reset the picture header (see MP4 spec)
        VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
        int codingType = picParam->vop_fields.bits.vop_coding_type;
        if (codingType == MP4_VOP_TYPE_S && picParam->no_of_sprite_warping_points > 1) {
            WTRACE("Hardware only supports up to one warping point (stationary or translation)");
        }

        if (picData->vop_coded == 0) {
            ETRACE("Unexpected non-coded VOP.");
            return DECODE_FAIL;
        }
        if (picData->new_picture_flag == 1 || mDecodingFrame == false) {
            // either condition indicates the start of a new frame
            if (picData->new_picture_flag == 0) {
                WTRACE("First slice of picture is lost!");
                // TODO: handle this case
            }
            if (mDecodingFrame) {
                if (codingType == MP4_VOP_TYPE_B){
                    // this indicates the start of a new frame in the packed frame
                    // Update the timestamp for the P frame in the packed frame, as the timestamp here is for the B frame!
                    if (picParam->vop_time_increment_resolution){
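                        // vop_time_increment wraps at vop_time_increment_resolution; adding the
                        // resolution before the modulo keeps the wrapped difference in [0, resolution).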
                        uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment +
                                picParam->vop_time_increment_resolution;
                        increment = increment % picParam->vop_time_increment_resolution;
                        // convert to microseconds
                        // TODO: the unit of the timestamp varies across frameworks
                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
                        mAcquiredBuffer->renderBuffer.timeStamp += increment;
                        if (useGraphicBuffer){
                            mPackedFrame.timestamp = mCurrentPTS;
                            mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp;
                        }
                    }
                } else {
                    // this indicates the start of a new frame in the packed frame with no B frame in the packet
                    // Update the timestamp according to the increment
                    if (picParam->vop_time_increment_resolution){
                        int64_t increment = picData->vop_time_increment - mLastVOPTimeIncrement + picParam->vop_time_increment_resolution;
                        increment = increment % picParam->vop_time_increment_resolution;
                        // convert to microseconds
                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
                        if (useGraphicBuffer) {
                            mPackedFrame.timestamp = mCurrentPTS + increment;
                        }
                        else {
                            mCurrentPTS += increment;
                        }

                    } else {
                        if (useGraphicBuffer) {
                            mPackedFrame.timestamp = mCurrentPTS + 30000;
                        }
                        else {
                            mCurrentPTS += 30000;
                        }
                    }
                }
                endDecodingFrame(false);
                mExpectingNVOP = true;
                if (codingType != MP4_VOP_TYPE_B) {
                    mExpectingNVOP = false;
                }
                if (useGraphicBuffer) {
                    int32_t count = i - 1;
                    if (count < 0) {
                        WTRACE("Should not be here!");
                        return DECODE_SUCCESS;
                    }
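                    // The previous picture data (index i-1) belongs to the frame just completed;
                    // the end of its slice data gives the offset where the second frame of the
                    // packed frame starts, which is reported back to OMX.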
                    vbp_picture_data_mp42 *lastpic = data->picture_data;
                    for (int k = 0; k < count; k++) {
                        lastpic = lastpic->next_picture_data;
                    }
                    mPackedFrame.offSet = lastpic->slice_data.slice_offset + lastpic->slice_data.slice_size;
                    VTRACE("Report multiple frames to OMX: offset=%d time=%lld", mPackedFrame.offSet, mPackedFrame.timestamp);
                    return DECODE_MULTIPLE_FRAME;
                }
            }

            // acquire a new surface buffer
            status = acquireSurfaceBuffer();
            CHECK_STATUS("acquireSurfaceBuffer");

            // a sprite is treated as a P frame in the display order, so only the B frame is not used as a "reference"
            mAcquiredBuffer->referenceFrame = (codingType != MP4_VOP_TYPE_B);
            if (picData->picture_param.vol_fields.bits.interlaced) {
                // only MPEG-4 studio profile can have field coding. All other profiles
                // use frame coding only, i.e., there is no field VOP. (see vop_structure in MP4 spec)
                mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
            } else {
                mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
            }
            // TODO: set discontinuity flag
            mAcquiredBuffer->renderBuffer.flag = 0;
            mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
            if (mSizeChanged) {
                mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
                mSizeChanged = false;
            }
            if (codingType != MP4_VOP_TYPE_B) {
                mLastVOPCodingType = codingType;
                mLastVOPTimeIncrement = picData->vop_time_increment;
            }

            // start decoding a frame
            vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
            CHECK_VA_STATUS("vaBeginPicture");

            mDecodingFrame = true;
            mSendIQMatrixBuf = true;
        }

        status = decodeSlice(data, picData);
        CHECK_STATUS("decodeSlice");
    }

    return DECODE_SUCCESS;
}


Decode_Status VideoDecoderMPEG4::decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQ matrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
    vbp_slice_data_mp42 *sliceData = &(picData->slice_data);
    VASliceParameterBufferMPEG4 *sliceParam = &(sliceData->slice_param);

    // send picture parameters for each slice
    status = setReference(picParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VAPictureParameterBufferType,
        sizeof(VAPictureParameterBufferMPEG4),
        1,
        picParam,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");

    bufferIDCount++;
    if (picParam->vol_fields.bits.quant_type && mSendIQMatrixBuf)
    {
        // only send the IQ matrix for the first slice in the picture
        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferMPEG4),
            1,
            &(data->iq_matrix_buffer),
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");

        mSendIQMatrixBuf = false;
        bufferIDCount++;
    }

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferMPEG4),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");

    bufferIDCount++;

    // slice data buffer pointer
    // Note that this is the original data buffer pointer;
    // the offset to the actual slice data is provided in
    // slice_data_offset in VASliceParameterBufferMPEG4

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        sliceData->slice_size, //size
        1,        //num_elements
        sliceData->buffer_addr + sliceData->slice_offset,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");

    bufferIDCount++;

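    // submit all buffers queued for this slice (picture parameters, optional IQ matrix,
    // slice parameters and slice data) in a single call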
    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *picParam) {
    switch (picParam->vop_fields.bits.vop_coding_type) {
        case MP4_VOP_TYPE_I:
            picParam->forward_reference_picture = VA_INVALID_SURFACE;
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case MP4_VOP_TYPE_P:
            if (mLastReference == NULL && mIsSyncFrame == false && !mIsShortHeader) {
                return DECODE_NO_REFERENCE;
            }
            if (mLastReference != NULL) {
                picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
            } else {
                VTRACE("%s: no reference frame, but keep decoding", __FUNCTION__);
                picParam->forward_reference_picture = VA_INVALID_SURFACE;
            }
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case MP4_VOP_TYPE_B:
            picParam->vop_fields.bits.backward_reference_vop_coding_type = mLastVOPCodingType;
            // WEIRD, CHECK AGAIN !!!!!!!
            if (mIsShortHeader) {
                if (mLastReference != NULL) {
                    picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
                } else {
                    VTRACE("%s: no forward reference frame, but keep decoding", __FUNCTION__);
                    picParam->forward_reference_picture = VA_INVALID_SURFACE;
                }
                if (mForwardReference != NULL) {
                    picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
                } else {
                    VTRACE("%s: no backward reference frame, but keep decoding", __FUNCTION__);
                    picParam->backward_reference_picture = VA_INVALID_SURFACE;
                }
            } else if (mLastReference == NULL || mForwardReference == NULL) {
                return DECODE_NO_REFERENCE;
            } else {
                picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
                picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
            }
            break;
        case MP4_VOP_TYPE_S:
            // WEIRD, CHECK AGAIN!!!! WAS using mForwardReference
            if (mLastReference == NULL) {
                return DECODE_NO_REFERENCE;
            }
            picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;

        default:
            // Will never reach here.
            return DECODE_PARSER_FAIL;
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderMPEG4::startVA(vbp_data_mp42 *data) {
    updateFormatInfo(data);

    VAProfile vaProfile;

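    // profile_and_level_indication values in the 0xF0..0xF7 range indicate MPEG-4 Advanced
    // Simple Profile levels; anything else falls back to VAProfileMPEG4Simple.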
    if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
        vaProfile = VAProfileMPEG4AdvancedSimple;
    } else {
        vaProfile = VAProfileMPEG4Simple;
    }

    mIsShortHeader = data->codec_data.short_video_header;

    return VideoDecoderBase::setupVA(MP4_SURFACE_NUMBER, vaProfile);
}

void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) {
    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
        mVideoFormatInfo.width, mVideoFormatInfo.height,
        data->codec_data.video_object_layer_width,
        data->codec_data.video_object_layer_height);
    // error resilience when the VOL header is missing
    if (!data->codec_data.got_vol && data->codec_data.got_vop) {
        data->codec_data.video_object_layer_width = mVideoFormatInfo.width;
        data->codec_data.video_object_layer_height = mVideoFormatInfo.height;
    }

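    // If the coded (VOL) size is larger than the size reported so far, expose the excess as
    // bottom/right crop so the client keeps displaying the original region.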
    mVideoFormatInfo.cropBottom = data->codec_data.video_object_layer_height > mVideoFormatInfo.height ?
            data->codec_data.video_object_layer_height - mVideoFormatInfo.height : 0;
    mVideoFormatInfo.cropRight = data->codec_data.video_object_layer_width > mVideoFormatInfo.width ?
            data->codec_data.video_object_layer_width - mVideoFormatInfo.width : 0;

    if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
        mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
        data->codec_data.video_object_layer_width &&
        data->codec_data.video_object_layer_height) {
        // update encoded image size
        mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
        mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
        mSizeChanged = true;
        ITRACE("Video size changed.");
    }

    // video_range has a default value of 0: Y ranges from 16 to 235.
    mVideoFormatInfo.videoRange = data->codec_data.video_range;

    switch (data->codec_data.matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix; set to 0 so the color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }

    mVideoFormatInfo.aspectX = data->codec_data.par_width;
    mVideoFormatInfo.aspectY = data->codec_data.par_height;
    //mVideoFormatInfo.bitrate = data->codec_data.bit_rate;
    mVideoFormatInfo.valid = true;

    setRenderRect();
    setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);
}

Decode_Status VideoDecoderMPEG4::checkHardwareCapability() {
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay,
            mIsShortHeader ? VAProfileH263Baseline : VAProfileMPEG4AdvancedSimple,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
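    // compare the total picture area against the driver-reported maximum; clips larger than
    // the hardware can handle are rejected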
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("hardware supported maximum resolution %d * %d is smaller than the clip resolution %d * %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }

    return DECODE_SUCCESS;
}