FrameDecoder.cpp revision f49d8f7b8c5f77016e9ba848219966890c9810a8
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "FrameDecoder"

#include "include/FrameDecoder.h"
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/Surface.h>
#include <inttypes.h>
#include <media/ICrypto.h>
#include <media/IMediaSource.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <private/media/VideoFrame.h>
#include <utils/Log.h>

namespace android {

static const int64_t kBufferTimeOutUs = 10000ll; // 10 msec
static const size_t kRetryCount = 50; // must be >0

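// Allocates a flattened VideoFrame in a shared-memory heap, sized for the
// requested dimensions, destination bit depth and ICC profile, and applies any
// rotation, pixel-aspect-ratio or display-size hints found in the track
// metadata. When metaOnly is true, only the frame header (no pixel data) is
// allocated.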
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
        int32_t width, int32_t height, int32_t dstBpp, bool metaOnly = false) {
    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }
    uint32_t type;
    const void *iccData;
    size_t iccSize;
    if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)){
        iccData = NULL;
        iccSize = 0;
    }

    int32_t sarWidth, sarHeight;
    int32_t displayWidth, displayHeight;
    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
            && sarHeight != 0) {
        displayWidth = (width * sarWidth) / sarHeight;
        displayHeight = height;
    } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
                && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
                && displayWidth > 0 && displayHeight > 0
                && width > 0 && height > 0) {
        ALOGV("found display size %dx%d", displayWidth, displayHeight);
    } else {
        displayWidth = width;
        displayHeight = height;
    }

    VideoFrame frame(width, height, displayWidth, displayHeight,
            rotationAngle, dstBpp, !metaOnly, iccSize);

    size_t size = frame.getFlattenedSize();
    sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
    if (heap == NULL) {
        ALOGE("failed to create MemoryDealer");
        return NULL;
    }
    sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
    if (frameMem == NULL) {
        ALOGE("not enough memory for VideoFrame size=%zu", size);
        return NULL;
    }
    VideoFrame* frameCopy = static_cast<VideoFrame*>(frameMem->pointer());
    frameCopy->init(frame, iccData, iccSize);

    return frameMem;
}

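// Returns true and fills in the thumbnail dimensions (and, optionally, the
// HEVC codec-config blob) if the track carries a stand-alone HEIF thumbnail.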
bool findThumbnailInfo(
        const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
        uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
    uint32_t dummyType;
    const void *dummyData;
    size_t dummySize;
    return trackMeta->findInt32(kKeyThumbnailWidth, width)
        && trackMeta->findInt32(kKeyThumbnailHeight, height)
        && trackMeta->findData(kKeyThumbnailHVCC,
                type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
}

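// Returns true and fills in the tile dimensions and grid layout if the track
// metadata describes a tiled (grid) HEIF image with positive values throughout.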
bool findGridInfo(const sp<MetaData> &trackMeta,
        int32_t *tileWidth, int32_t *tileHeight, int32_t *gridRows, int32_t *gridCols) {
    return trackMeta->findInt32(kKeyTileWidth, tileWidth) && (*tileWidth > 0)
        && trackMeta->findInt32(kKeyTileHeight, tileHeight) && (*tileHeight > 0)
        && trackMeta->findInt32(kKeyGridRows, gridRows) && (*gridRows > 0)
        && trackMeta->findInt32(kKeyGridCols, gridCols) && (*gridCols > 0);
}

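// Maps the requested HAL pixel format to the corresponding OMX color format
// and its bytes-per-pixel; returns false for unsupported formats.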
bool getDstColorFormat(
        android_pixel_format_t colorFormat,
        OMX_COLOR_FORMATTYPE *dstFormat,
        int32_t *dstBpp) {
    switch (colorFormat) {
        case HAL_PIXEL_FORMAT_RGB_565:
        {
            *dstFormat = OMX_COLOR_Format16bitRGB565;
            *dstBpp = 2;
            return true;
        }
        case HAL_PIXEL_FORMAT_RGBA_8888:
        {
            *dstFormat = OMX_COLOR_Format32BitRGBA8888;
            *dstBpp = 4;
            return true;
        }
        case HAL_PIXEL_FORMAT_BGRA_8888:
        {
            *dstFormat = OMX_COLOR_Format32bitBGRA8888;
            *dstBpp = 4;
            return true;
        }
        default:
        {
            ALOGE("Unsupported color format: %d", colorFormat);
            break;
        }
    }
    return false;
}

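// Builds a VideoFrame that carries only metadata (dimensions, rotation, ICC
// profile) without decoding any pixels, using either the thumbnail or the
// full-image dimensions from the track metadata.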
//static
sp<IMemory> FrameDecoder::getMetadataOnly(
        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
    OMX_COLOR_FORMATTYPE dstFormat;
    int32_t dstBpp;
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
        return NULL;
    }

    int32_t width, height;
    if (thumbnail) {
        if (!findThumbnailInfo(trackMeta, &width, &height)) {
            return NULL;
        }
    } else {
        CHECK(trackMeta->findInt32(kKeyWidth, &width));
        CHECK(trackMeta->findInt32(kKeyHeight, &height));
    }
    return allocVideoFrame(trackMeta, width, height, dstBpp, true /*metaOnly*/);
}

FrameDecoder::FrameDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : mComponentName(componentName),
      mTrackMeta(trackMeta),
      mSource(source),
      mDstFormat(OMX_COLOR_Format16bitRGB565),
      mDstBpp(2),
      mHaveMoreInputs(true),
      mFirstSample(true) {
}

FrameDecoder::~FrameDecoder() {
    if (mDecoder != NULL) {
        mDecoder->release();
        mSource->stop();
    }
}

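// Creates and configures the decoder for the requested frame(s): asks the
// subclass for the input format and seek options, instantiates the MediaCodec
// by component name, and starts both the codec and the media source.
//
// Typical call sequence (a sketch only, of how a caller such as
// StagefrightMetadataRetriever might drive this class; the local variable
// names below are illustrative):
//
//   sp<VideoFrameDecoder> decoder =
//           new VideoFrameDecoder(componentName, trackMeta, source);
//   if (decoder->init(timeUs, 1 /* numFrames */, option, colorFormat) == OK) {
//       sp<IMemory> frame = decoder->extractFrame();
//   }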
status_t FrameDecoder::init(
        int64_t frameTimeUs, size_t numFrames, int option, int colorFormat) {
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
        return ERROR_UNSUPPORTED;
    }

    sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
            frameTimeUs, numFrames, option, &mReadOptions);
    if (videoFormat == NULL) {
        ALOGE("video format or seek mode not supported");
        return ERROR_UNSUPPORTED;
    }

    status_t err;
    sp<ALooper> looper = new ALooper;
    looper->start();
    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
            looper, mComponentName, &err);
    if (decoder.get() == NULL || err != OK) {
        ALOGW("Failed to instantiate decoder [%s]", mComponentName.c_str());
        return (decoder.get() == NULL) ? NO_MEMORY : err;
    }

    err = decoder->configure(
            videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
    if (err != OK) {
        ALOGW("configure returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = decoder->start();
    if (err != OK) {
        ALOGW("start returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = mSource->start();
    if (err != OK) {
        ALOGW("source failed to start: %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }
    mDecoder = decoder;

    return OK;
}

sp<IMemory> FrameDecoder::extractFrame() {
    status_t err = extractInternal();
    if (err != OK) {
        return NULL;
    }

    return mFrames.size() > 0 ? mFrames[0] : NULL;
}

status_t FrameDecoder::extractFrames(std::vector<sp<IMemory> >* frames) {
    status_t err = extractInternal();
    if (err != OK) {
        return err;
    }

    for (size_t i = 0; i < mFrames.size(); i++) {
        frames->push_back(mFrames[i]);
    }
    return OK;
}

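// Main decode loop: keeps the codec's input port full with samples read from
// the source, then blocks (with a bounded number of retries) on dequeuing
// decoded output buffers and hands each one to the subclass via
// onOutputReceived() until the requested frame(s) have been produced.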
status_t FrameDecoder::extractInternal() {
    status_t err = OK;
    bool done = false;
    size_t retriesLeft = kRetryCount;
    do {
        size_t index;
        int64_t ptsUs = 0ll;
        uint32_t flags = 0;

        // Queue as many inputs as we possibly can, then block on dequeuing
        // outputs. After getting each output, come back and queue the inputs
        // again to keep the decoder busy.
        while (mHaveMoreInputs) {
            err = mDecoder->dequeueInputBuffer(&index, 0);
            if (err != OK) {
                ALOGV("Timed out waiting for input");
                if (retriesLeft) {
                    err = OK;
                }
                break;
            }
            sp<MediaCodecBuffer> codecBuffer;
            err = mDecoder->getInputBuffer(index, &codecBuffer);
            if (err != OK) {
                ALOGE("failed to get input buffer %zu", index);
                break;
            }

            MediaBufferBase *mediaBuffer = NULL;

            err = mSource->read(&mediaBuffer, &mReadOptions);
            mReadOptions.clearSeekTo();
            if (err != OK) {
                ALOGW("Input Error or EOS");
                mHaveMoreInputs = false;
                if (!mFirstSample && err == ERROR_END_OF_STREAM) {
                    err = OK;
                }
                break;
            }

            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
                        mediaBuffer->range_length(), codecBuffer->capacity());
                mHaveMoreInputs = false;
                err = BAD_VALUE;
            } else {
                codecBuffer->setRange(0, mediaBuffer->range_length());

                CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
                memcpy(codecBuffer->data(),
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        mediaBuffer->range_length());

                onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
                mFirstSample = false;
            }

            mediaBuffer->release();

            if (mHaveMoreInputs) {
                ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
                        codecBuffer->size(), ptsUs, flags);

                err = mDecoder->queueInputBuffer(
                        index,
                        codecBuffer->offset(),
                        codecBuffer->size(),
                        ptsUs,
                        flags);

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    mHaveMoreInputs = false;
                }
            }
        }

        while (err == OK) {
            size_t offset, size;
            // wait for a decoded buffer
            err = mDecoder->dequeueOutputBuffer(
                    &index,
                    &offset,
                    &size,
                    &ptsUs,
                    &flags,
                    kBufferTimeOutUs);

            if (err == INFO_FORMAT_CHANGED) {
                ALOGV("Received format change");
                err = mDecoder->getOutputFormat(&mOutputFormat);
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("Output buffers changed");
                err = OK;
            } else {
                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
                    err = OK;
                } else if (err == OK) {
                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
                    // from the extractor, decode to the specified frame. Otherwise we're done.
                    ALOGV("Received an output buffer, timeUs=%lld", (long long)ptsUs);
                    sp<MediaCodecBuffer> videoFrameBuffer;
                    err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
                    if (err != OK) {
                        ALOGE("failed to get output buffer %zu", index);
                        break;
                    }
                    err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
                    mDecoder->releaseOutputBuffer(index);
                } else {
                    ALOGW("Received error %d (%s) instead of output", err, asString(err));
                    done = true;
                }
                break;
            }
        }
    } while (err == OK && !done);

    if (err != OK) {
        ALOGE("failed to get video frame (err %d)", err);
    }

    return err;
}

//////////////////////////////////////////////////////////////////////

VideoFrameDecoder::VideoFrameDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : FrameDecoder(componentName, trackMeta, source),
      mIsAvcOrHevc(false),
      mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
      mTargetTimeUs(-1ll),
      mNumFrames(0),
      mNumFramesDecoded(0) {
}

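// Validates the seek mode, seeks the source to the requested time (or the
// container's thumbnail time when frameTimeUs < 0), and builds the codec input
// format; for sync-frame seeks it also hints the codec to use single buffers.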
sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t numFrames, int seekMode, MediaSource::ReadOptions *options) {
    mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
    if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
            mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
        ALOGE("Unknown seek mode: %d", mSeekMode);
        return NULL;
    }
    mNumFrames = numFrames;

    const char *mime;
    if (!trackMeta()->findCString(kKeyMIMEType, &mime)) {
        ALOGE("Could not find mime type");
        return NULL;
    }

    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);

    if (frameTimeUs < 0) {
        int64_t thumbNailTime;
        if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options->setSeekTo(thumbNailTime, mSeekMode);
    } else {
        options->setSeekTo(frameTimeUs, mSeekMode);
    }

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(trackMeta(), &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    // For the thumbnail extraction case, try to allocate single buffer in both
    // input and output ports, if seeking to a sync frame. NOTE: This request may
    // fail if component requires more than that for decoding.
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    if (!isSeekingClosest) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

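// Called for each input sample before it is queued: records the extractor's
// target time on the first sample of a CLOSEST seek, and marks a single IDR
// frame as EOS for AVC/HEVC so only one access unit needs to be decoded.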
status_t VideoFrameDecoder::onInputReceived(
        const sp<MediaCodecBuffer> &codecBuffer,
        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);

    if (firstSample && isSeekingClosest) {
        sampleMeta.findInt64(kKeyTargetTime, &mTargetTimeUs);
        ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
    }

    if (mIsAvcOrHevc && !isSeekingClosest
            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
        // Only need to decode one IDR frame, unless we're seeking with CLOSEST
        // option, in which case we need to actually decode to targetTimeUs.
        *flags |= MediaCodec::BUFFER_FLAG_EOS;
    }
    return OK;
}

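// Called for each decoded buffer: skips frames earlier than the CLOSEST-seek
// target time, then allocates a VideoFrame sized to the crop rectangle and
// color-converts the decoder output into it; sets *done once mNumFrames have
// been produced.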
status_t VideoFrameDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat,
        int64_t timeUs, bool *done) {
    bool shouldOutput = (mTargetTimeUs < 0ll) || (timeUs >= mTargetTimeUs);

    // If this is not the target frame, skip color convert.
    if (!shouldOutput) {
        *done = false;
        return OK;
    }

    *done = (++mNumFramesDecoded >= mNumFrames);

    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height;
    CHECK(outputFormat->findInt32("width", &width));
    CHECK(outputFormat->findInt32("height", &height));

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    sp<IMemory> frameMem = allocVideoFrame(
            trackMeta(),
            (crop_right - crop_left + 1),
            (crop_bottom - crop_top + 1),
            dstBpp());
    addFrame(frameMem);
    VideoFrame* frame = static_cast<VideoFrame*>(frameMem->pointer());

    int32_t srcFormat;
    CHECK(outputFormat->findInt32("color-format", &srcFormat));

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->getFlattenedData(),
                frame->mWidth,
                frame->mHeight,
                crop_left, crop_top, crop_right, crop_bottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
                srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

////////////////////////////////////////////////////////////////////////

ImageDecoder::ImageDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : FrameDecoder(componentName, trackMeta, source),
      mFrame(NULL),
      mWidth(0),
      mHeight(0),
      mGridRows(1),
      mGridCols(1),
      mTilesDecoded(0) {
}

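// Builds the codec input format for a HEIF image track: uses the stand-alone
// thumbnail track info when frameTimeUs < 0, otherwise the full image; for
// tiled images it overrides the format with the tile size and remembers the
// grid layout so the tiles can be stitched together in onOutputReceived().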
sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t /*numFrames*/,
        int /*seekMode*/, MediaSource::ReadOptions *options) {
    sp<MetaData> overrideMeta;
    if (frameTimeUs < 0) {
        uint32_t type;
        const void *data;
        size_t size;

        // if we have a stand-alone thumbnail, set up the override meta,
        // and set seekTo time to -1.
        if (!findThumbnailInfo(trackMeta(), &mWidth, &mHeight, &type, &data, &size)) {
            ALOGE("Thumbnail not available");
            return NULL;
        }
        overrideMeta = new MetaData(*(trackMeta()));
        overrideMeta->remove(kKeyDisplayWidth);
        overrideMeta->remove(kKeyDisplayHeight);
        overrideMeta->setInt32(kKeyWidth, mWidth);
        overrideMeta->setInt32(kKeyHeight, mHeight);
        overrideMeta->setData(kKeyHVCC, type, data, size);
        options->setSeekTo(-1);
    } else {
        CHECK(trackMeta()->findInt32(kKeyWidth, &mWidth));
        CHECK(trackMeta()->findInt32(kKeyHeight, &mHeight));

        options->setSeekTo(frameTimeUs);
    }

    mGridRows = mGridCols = 1;
    if (overrideMeta == NULL) {
        // check if we're dealing with a tiled heif
        int32_t tileWidth, tileHeight, gridRows, gridCols;
        if (findGridInfo(trackMeta(), &tileWidth, &tileHeight, &gridRows, &gridCols)) {
            if (mWidth <= tileWidth * gridCols && mHeight <= tileHeight * gridRows) {
                ALOGV("grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, mWidth, mHeight);

                overrideMeta = new MetaData(*(trackMeta()));
                overrideMeta->setInt32(kKeyWidth, tileWidth);
                overrideMeta->setInt32(kKeyHeight, tileHeight);
                mGridCols = gridCols;
                mGridRows = gridRows;
            } else {
                ALOGE("ignore bad grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, mWidth, mHeight);
            }
        }
        if (overrideMeta == NULL) {
            overrideMeta = trackMeta();
        }
    }

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    if ((mGridRows == 1) && (mGridCols == 1)) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

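// Called for each decoded tile (or the whole image when there is no grid):
// lazily allocates the full-size VideoFrame, computes the tile's destination
// rectangle from its position in the grid, clamps the crop at the right and
// bottom picture edges, and color-converts the tile into place; sets *done
// once all tiles have been decoded.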
status_t ImageDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height;
    CHECK(outputFormat->findInt32("width", &width));
    CHECK(outputFormat->findInt32("height", &height));

    if (mFrame == NULL) {
        sp<IMemory> frameMem = allocVideoFrame(trackMeta(), mWidth, mHeight, dstBpp());
        mFrame = static_cast<VideoFrame*>(frameMem->pointer());

        addFrame(frameMem);
    }

    int32_t srcFormat;
    CHECK(outputFormat->findInt32("color-format", &srcFormat));

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    int32_t dstLeft, dstTop, dstRight, dstBottom;
    int32_t numTiles = mGridRows * mGridCols;

    dstLeft = mTilesDecoded % mGridCols * width;
    dstTop = mTilesDecoded / mGridCols * height;
    dstRight = dstLeft + width - 1;
    dstBottom = dstTop + height - 1;

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    // apply crop on bottom-right
    // TODO: need to move this into the color converter itself.
    if (dstRight >= mWidth) {
        crop_right = mWidth - dstLeft - 1;
        dstRight = dstLeft + crop_right;
    }
    if (dstBottom >= mHeight) {
        crop_bottom = mHeight - dstTop - 1;
        dstBottom = dstTop + crop_bottom;
    }

    *done = (++mTilesDecoded >= numTiles);

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                mFrame->getFlattenedData(),
                mFrame->mWidth,
                mFrame->mHeight,
                dstLeft, dstTop, dstRight, dstBottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
                srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

}  // namespace android