FrameDecoder.cpp revision c666687f67d1d00beb88c63f90cefc708cbe8500
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "FrameDecoder"

#include <inttypes.h>

#include <utils/Log.h>
#include <gui/Surface.h>

#include "include/FrameDecoder.h"
#include <media/ICrypto.h>
#include <media/IMediaSource.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <private/media/VideoFrame.h>

namespace android {

static const int64_t kBufferTimeOutUs = 10000ll; // 10 msec
static const size_t kRetryCount = 50; // must be >0
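// Together these bound how long extractInternal() waits on the codec: each
// output dequeue blocks for at most kBufferTimeOutUs, and up to kRetryCount
// timeouts are tolerated before giving up (roughly 500 ms in total).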

//static
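// Allocates a VideoFrame sized to the decoded dimensions, carrying over the
// rotation, ICC profile and display aspect ratio found in the track metadata.
// When metaOnly is true, no pixel buffer is allocated.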
VideoFrame *allocVideoFrame(const sp<MetaData> &trackMeta,
        int32_t width, int32_t height, int32_t dstBpp, bool metaOnly = false) {
    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }
    uint32_t type;
    const void *iccData;
    size_t iccSize;
    if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)) {
        iccData = NULL;
        iccSize = 0;
    }

    int32_t sarWidth, sarHeight;
    int32_t displayWidth, displayHeight;
    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
            && sarHeight != 0) {
        displayWidth = (width * sarWidth) / sarHeight;
        displayHeight = height;
    } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
                && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
                && displayWidth > 0 && displayHeight > 0
                && width > 0 && height > 0) {
        ALOGV("found display size %dx%d", displayWidth, displayHeight);
    } else {
        displayWidth = width;
        displayHeight = height;
    }

    return new VideoFrame(width, height, displayWidth, displayHeight,
            rotationAngle, dstBpp, !metaOnly, iccData, iccSize);
}

//static
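// Returns true only if the track carries a stand-alone HEIF thumbnail, i.e.
// thumbnail width, height and its HVCC data are all present. The type, data
// and size out-parameters are optional.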
bool findThumbnailInfo(
        const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
        uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
    uint32_t dummyType;
    const void *dummyData;
    size_t dummySize;
    return trackMeta->findInt32(kKeyThumbnailWidth, width)
        && trackMeta->findInt32(kKeyThumbnailHeight, height)
        && trackMeta->findData(kKeyThumbnailHVCC,
                type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
}

//static
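// Builds a VideoFrame that carries only metadata (dimensions, rotation, ICC
// profile); no pixel data is decoded or copied.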
VideoFrame* FrameDecoder::getMetadataOnly(
        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
    OMX_COLOR_FORMATTYPE dstFormat;
    int32_t dstBpp;
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
        return NULL;
    }

    int32_t width, height;
    if (thumbnail) {
        if (!findThumbnailInfo(trackMeta, &width, &height)) {
            return NULL;
        }
    } else {
        CHECK(trackMeta->findInt32(kKeyWidth, &width));
        CHECK(trackMeta->findInt32(kKeyHeight, &height));
    }
    return allocVideoFrame(trackMeta, width, height, dstBpp, true /*metaOnly*/);
}

//static
bool FrameDecoder::getDstColorFormat(
        android_pixel_format_t colorFormat,
        OMX_COLOR_FORMATTYPE *dstFormat,
        int32_t *dstBpp) {
    switch (colorFormat) {
        case HAL_PIXEL_FORMAT_RGB_565:
        {
            *dstFormat = OMX_COLOR_Format16bitRGB565;
            *dstBpp = 2;
            return true;
        }
        case HAL_PIXEL_FORMAT_RGBA_8888:
        {
            *dstFormat = OMX_COLOR_Format32BitRGBA8888;
            *dstBpp = 4;
            return true;
        }
        case HAL_PIXEL_FORMAT_BGRA_8888:
        {
            *dstFormat = OMX_COLOR_Format32bitBGRA8888;
            *dstBpp = 4;
            return true;
        }
        default:
        {
            ALOGE("Unsupported color format: %d", colorFormat);
            break;
        }
    }
    return false;
}
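
// Minimal caller sketch (an assumption for illustration: a concrete subclass
// such as VideoFrameDecoder has been constructed with a component name, track
// metadata and source, and "decoder" points to it):
//
//   VideoFrame *frame = decoder->extractFrame(
//           -1 /* frameTimeUs: negative selects the thumbnail time */,
//           MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC /* option */,
//           HAL_PIXEL_FORMAT_RGBA_8888 /* colorFormat */);
//   // Returns NULL on failure; on success the frame is released from mFrames
//   // and the caller owns it.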

VideoFrame* FrameDecoder::extractFrame(
        int64_t frameTimeUs, int option, int colorFormat) {
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
        return NULL;
    }

    status_t err = extractInternal(frameTimeUs, 1, option);
    if (err != OK) {
        return NULL;
    }

    return mFrames.size() > 0 ? mFrames[0].release() : NULL;
}
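
// Batch variant of extractFrame(): decodes up to numFrames frames starting at
// frameTimeUs and appends them to *frames. Each appended VideoFrame is
// released from mFrames, so the caller becomes responsible for it.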

status_t FrameDecoder::extractFrames(
        int64_t frameTimeUs, size_t numFrames, int option, int colorFormat,
        std::vector<VideoFrame*>* frames) {
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
        return ERROR_UNSUPPORTED;
    }

    status_t err = extractInternal(frameTimeUs, numFrames, option);
    if (err != OK) {
        return err;
    }

    for (size_t i = 0; i < mFrames.size(); i++) {
        frames->push_back(mFrames[i].release());
    }
    return OK;
}

status_t FrameDecoder::extractInternal(
        int64_t frameTimeUs, size_t numFrames, int option) {

    MediaSource::ReadOptions options;
    sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
            frameTimeUs, numFrames, option, &options);
    if (videoFormat == NULL) {
        ALOGE("video format or seek mode not supported");
        return ERROR_UNSUPPORTED;
    }

    status_t err;
    sp<ALooper> looper = new ALooper;
    looper->start();
    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
            looper, mComponentName, &err);
    if (decoder.get() == NULL || err != OK) {
        ALOGW("Failed to instantiate decoder [%s]", mComponentName.c_str());
        return (decoder.get() == NULL) ? NO_MEMORY : err;
    }

    err = decoder->configure(videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
    if (err != OK) {
        ALOGW("configure returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = decoder->start();
    if (err != OK) {
        ALOGW("start returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = mSource->start();
    if (err != OK) {
        ALOGW("source failed to start: %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    Vector<sp<MediaCodecBuffer> > inputBuffers;
    err = decoder->getInputBuffers(&inputBuffers);
    if (err != OK) {
        ALOGW("failed to get input buffers: %d (%s)", err, asString(err));
        decoder->release();
        mSource->stop();
        return err;
    }

    Vector<sp<MediaCodecBuffer> > outputBuffers;
    err = decoder->getOutputBuffers(&outputBuffers);
    if (err != OK) {
        ALOGW("failed to get output buffers: %d (%s)", err, asString(err));
        decoder->release();
        mSource->stop();
        return err;
    }

    sp<AMessage> outputFormat = NULL;
    bool haveMoreInputs = true;
    size_t index, offset, size;
    int64_t timeUs;
    size_t retriesLeft = kRetryCount;
    bool done = false;
    bool firstSample = true;
    do {
        size_t inputIndex = -1;
        int64_t ptsUs = 0ll;
        uint32_t flags = 0;
        sp<MediaCodecBuffer> codecBuffer = NULL;

        // Queue as many inputs as we possibly can, then block on dequeuing
        // outputs. After getting each output, come back and queue the inputs
        // again to keep the decoder busy.
        while (haveMoreInputs) {
            err = decoder->dequeueInputBuffer(&inputIndex, 0);
            if (err != OK) {
                ALOGV("Timed out waiting for input");
                if (retriesLeft) {
                    err = OK;
                }
                break;
            }
            codecBuffer = inputBuffers[inputIndex];

            MediaBufferBase *mediaBuffer = NULL;

            err = mSource->read(&mediaBuffer, &options);
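            // The seek set up in onGetFormatAndSeekOptions() only applies to
            // the first read; clear it so subsequent reads continue sequentially.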
            options.clearSeekTo();
            if (err != OK) {
                ALOGW("Input Error or EOS");
                haveMoreInputs = false;
                if (!firstSample && err == ERROR_END_OF_STREAM) {
                    err = OK;
                }
                break;
            }

            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
                        mediaBuffer->range_length(), codecBuffer->capacity());
                haveMoreInputs = false;
                err = BAD_VALUE;
            } else {
                codecBuffer->setRange(0, mediaBuffer->range_length());

                CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
                memcpy(codecBuffer->data(),
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        mediaBuffer->range_length());

                onInputReceived(codecBuffer, mediaBuffer->meta_data(), firstSample, &flags);
                firstSample = false;
            }

            mediaBuffer->release();

            if (haveMoreInputs && inputIndex < inputBuffers.size()) {
                ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
                        codecBuffer->size(), ptsUs, flags);

                err = decoder->queueInputBuffer(
                        inputIndex,
                        codecBuffer->offset(),
                        codecBuffer->size(),
                        ptsUs,
                        flags);

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    haveMoreInputs = false;
                }
            }
        }

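        // Block (for up to kBufferTimeOutUs) on one decoded output, handling
        // format and output-buffer-set changes along the way, then break back
        // to the outer loop to queue more input.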
        while (err == OK) {
            // wait for a decoded buffer
            err = decoder->dequeueOutputBuffer(
                    &index,
                    &offset,
                    &size,
                    &timeUs,
                    &flags,
                    kBufferTimeOutUs);

            if (err == INFO_FORMAT_CHANGED) {
                ALOGV("Received format change");
                err = decoder->getOutputFormat(&outputFormat);
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("Output buffers changed");
                err = decoder->getOutputBuffers(&outputBuffers);
            } else {
                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
                    err = OK;
                } else if (err == OK) {
                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
                    // from the extractor, decode to the specified frame. Otherwise we're done.
                    ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
                    sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);

                    err = onOutputReceived(videoFrameBuffer, outputFormat, timeUs, &done);

                    decoder->releaseOutputBuffer(index);
                } else {
                    ALOGW("Received error %d (%s) instead of output", err, asString(err));
                    done = true;
                }
                break;
            }
        }
    } while (err == OK && !done);

    mSource->stop();
    decoder->release();

    if (err != OK) {
        ALOGE("failed to get video frame (err %d)", err);
    }

    return err;
}

sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t numFrames, int seekMode, MediaSource::ReadOptions *options) {
    mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
    if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
            mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
        ALOGE("Unknown seek mode: %d", mSeekMode);
        return NULL;
    }
    mNumFrames = numFrames;

    const char *mime;
    if (!trackMeta()->findCString(kKeyMIMEType, &mime)) {
        ALOGE("Could not find mime type");
        return NULL;
    }

    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);

    if (frameTimeUs < 0) {
        int64_t thumbNailTime;
        if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options->setSeekTo(thumbNailTime, mSeekMode);
    } else {
        options->setSeekTo(frameTimeUs, mSeekMode);
    }

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(trackMeta(), &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    // For the thumbnail extraction case, try to allocate a single buffer on both
    // the input and output ports when seeking to a sync frame. NOTE: this request
    // may fail if the component requires more than that for decoding.
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    if (!isSeekingClosest) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

status_t VideoFrameDecoder::onInputReceived(
        const sp<MediaCodecBuffer> &codecBuffer,
        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);

    if (firstSample && isSeekingClosest) {
        sampleMeta.findInt64(kKeyTargetTime, &mTargetTimeUs);
        ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
    }

    if (mIsAvcOrHevc && !isSeekingClosest
            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
        // Only need to decode one IDR frame, unless we're seeking with CLOSEST
        // option, in which case we need to actually decode to targetTimeUs.
        *flags |= MediaCodec::BUFFER_FLAG_EOS;
    }
    return OK;
}

status_t VideoFrameDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat,
        int64_t timeUs, bool *done) {
    bool shouldOutput = (mTargetTimeUs < 0ll) || (timeUs >= mTargetTimeUs);

    // If this is not the target frame, skip color convert.
    if (!shouldOutput) {
        *done = false;
        return OK;
    }

    *done = (++mNumFramesDecoded >= mNumFrames);

    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height;
    CHECK(outputFormat->findInt32("width", &width));
    CHECK(outputFormat->findInt32("height", &height));

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }
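    // The crop rect is inclusive on all edges, so the visible size below is
    // (crop_right - crop_left + 1) x (crop_bottom - crop_top + 1).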

    VideoFrame *frame = allocVideoFrame(
            trackMeta(),
            (crop_right - crop_left + 1),
            (crop_bottom - crop_top + 1),
            dstBpp());
    addFrame(frame);

    int32_t srcFormat;
    CHECK(outputFormat->findInt32("color-format", &srcFormat));

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                crop_left, crop_top, crop_right, crop_bottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
                srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t /*numFrames*/,
        int /*seekMode*/, MediaSource::ReadOptions *options) {
    sp<MetaData> overrideMeta;
    mThumbnail = false;
    if (frameTimeUs < 0) {
        uint32_t type;
        const void *data;
        size_t size;
        int32_t thumbWidth, thumbHeight;

        // if we have a stand-alone thumbnail, set up the override meta,
        // and set seekTo time to -1.
        if (!findThumbnailInfo(trackMeta(),
                &thumbWidth, &thumbHeight, &type, &data, &size)) {
            ALOGE("Thumbnail not available");
            return NULL;
        }
        overrideMeta = new MetaData(*(trackMeta()));
        overrideMeta->remove(kKeyDisplayWidth);
        overrideMeta->remove(kKeyDisplayHeight);
        overrideMeta->setInt32(kKeyWidth, thumbWidth);
        overrideMeta->setInt32(kKeyHeight, thumbHeight);
        overrideMeta->setData(kKeyHVCC, type, data, size);
        options->setSeekTo(-1);
        mThumbnail = true;
    } else {
        options->setSeekTo(frameTimeUs);
    }

    mGridRows = mGridCols = 1;
    if (overrideMeta == NULL) {
        // check if we're dealing with a tiled HEIF
        int32_t tileWidth, tileHeight, gridRows, gridCols;
        if (trackMeta()->findInt32(kKeyTileWidth, &tileWidth) && tileWidth > 0
         && trackMeta()->findInt32(kKeyTileHeight, &tileHeight) && tileHeight > 0
         && trackMeta()->findInt32(kKeyGridRows, &gridRows) && gridRows > 0
         && trackMeta()->findInt32(kKeyGridCols, &gridCols) && gridCols > 0) {
            int32_t width, height;
            CHECK(trackMeta()->findInt32(kKeyWidth, &width));
            CHECK(trackMeta()->findInt32(kKeyHeight, &height));

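            // Accept the grid only if the tiles cover the whole picture; the
            // overhang of the last row/column is cropped in onOutputReceived().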
            if (width <= tileWidth * gridCols && height <= tileHeight * gridRows) {
                ALOGV("grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, width, height);

                overrideMeta = new MetaData(*(trackMeta()));
                overrideMeta->setInt32(kKeyWidth, tileWidth);
                overrideMeta->setInt32(kKeyHeight, tileHeight);
                mGridCols = gridCols;
                mGridRows = gridRows;
            } else {
                ALOGE("bad grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, width, height);
            }
        }
        if (overrideMeta == NULL) {
            overrideMeta = trackMeta();
        }
    }

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    if ((mGridRows == 1) && (mGridCols == 1)) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

status_t ImageDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height;
    CHECK(outputFormat->findInt32("width", &width));
    CHECK(outputFormat->findInt32("height", &height));

    int32_t imageWidth, imageHeight;
    if (mThumbnail) {
        CHECK(trackMeta()->findInt32(kKeyThumbnailWidth, &imageWidth));
        CHECK(trackMeta()->findInt32(kKeyThumbnailHeight, &imageHeight));
    } else {
        CHECK(trackMeta()->findInt32(kKeyWidth, &imageWidth));
        CHECK(trackMeta()->findInt32(kKeyHeight, &imageHeight));
    }

    if (mFrame == NULL) {
        mFrame = allocVideoFrame(trackMeta(), imageWidth, imageHeight, dstBpp());

        addFrame(mFrame);
    }

    int32_t srcFormat;
    CHECK(outputFormat->findInt32("color-format", &srcFormat));

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    int32_t dstLeft, dstTop, dstRight, dstBottom;
    int32_t numTiles = mGridRows * mGridCols;

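    // Tiles arrive in row-major order: tile i goes to column (i % mGridCols)
    // and row (i / mGridCols) of the full output image.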
    dstLeft = mTilesDecoded % mGridCols * width;
    dstTop = mTilesDecoded / mGridCols * height;
    dstRight = dstLeft + width - 1;
    dstBottom = dstTop + height - 1;

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    // apply crop on bottom-right
    // TODO: need to move this into the color converter itself.
    if (dstRight >= imageWidth) {
        crop_right = imageWidth - dstLeft - 1;
        dstRight = dstLeft + crop_right;
    }
    if (dstBottom >= imageHeight) {
        crop_bottom = imageHeight - dstTop - 1;
        dstBottom = dstTop + crop_bottom;
    }

    *done = (++mTilesDecoded >= numTiles);

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                mFrame->mData,
                mFrame->mWidth,
                mFrame->mHeight,
                dstLeft, dstTop, dstRight, dstBottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
                srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

}  // namespace android