FrameDecoder.cpp revision e44ca3765fca8d82f7dcf884c53acddb6704dd20
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "FrameDecoder"

#include "include/FrameDecoder.h"
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/Surface.h>
#include <inttypes.h>
#include <media/ICrypto.h>
#include <media/IMediaSource.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <private/media/VideoFrame.h>
#include <utils/Log.h>

namespace android {

static const int64_t kBufferTimeOutUs = 10000ll; // 10 msec
static const size_t kRetryCount = 50; // must be >0

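// Allocates an IMemory-backed VideoFrame sized for a |width| x |height|
// image at |dstBpp| bytes per pixel (header only when |metaOnly| is true).
// Display dimensions are derived from the track's sample-aspect-ratio or
// display-size keys when present, and any ICC profile in the track metadata
// is copied into the flattened frame.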
//static
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
        int32_t width, int32_t height, int32_t dstBpp, bool metaOnly = false) {
    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }
    uint32_t type;
    const void *iccData;
    size_t iccSize;
    if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)) {
        iccData = NULL;
        iccSize = 0;
    }

    int32_t sarWidth, sarHeight;
    int32_t displayWidth, displayHeight;
    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
            && sarHeight != 0) {
        displayWidth = (width * sarWidth) / sarHeight;
        displayHeight = height;
    } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
                && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
                && displayWidth > 0 && displayHeight > 0
                && width > 0 && height > 0) {
        ALOGV("found display size %dx%d", displayWidth, displayHeight);
    } else {
        displayWidth = width;
        displayHeight = height;
    }

    VideoFrame frame(width, height, displayWidth, displayHeight,
            rotationAngle, dstBpp, !metaOnly, iccSize);

    size_t size = frame.getFlattenedSize();
    sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
    if (heap == NULL) {
        ALOGE("failed to create MemoryHeapBase");
        return NULL;
    }
    sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
    if (frameMem == NULL) {
        ALOGE("not enough memory for VideoFrame size=%zu", size);
        return NULL;
    }
    VideoFrame* frameCopy = static_cast<VideoFrame*>(frameMem->pointer());
    frameCopy->init(frame, iccData, iccSize);

    return frameMem;
}

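// Returns true if the track metadata describes a stand-alone thumbnail
// (dimensions plus thumbnail HVCC data), optionally passing the HVCC blob
// back to the caller.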
//static
bool findThumbnailInfo(
        const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
        uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
    uint32_t dummyType;
    const void *dummyData;
    size_t dummySize;
    return trackMeta->findInt32(kKeyThumbnailWidth, width)
        && trackMeta->findInt32(kKeyThumbnailHeight, height)
        && trackMeta->findData(kKeyThumbnailHVCC,
                type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
}

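// Builds a metadata-only VideoFrame (no pixel data) for either the full
// image or its thumbnail, using dimensions taken from the track metadata.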
//static
sp<IMemory> FrameDecoder::getMetadataOnly(
        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
    OMX_COLOR_FORMATTYPE dstFormat;
    int32_t dstBpp;
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
        return NULL;
    }

    int32_t width, height;
    if (thumbnail) {
        if (!findThumbnailInfo(trackMeta, &width, &height)) {
            return NULL;
        }
    } else {
        CHECK(trackMeta->findInt32(kKeyWidth, &width));
        CHECK(trackMeta->findInt32(kKeyHeight, &height));
    }
    return allocVideoFrame(trackMeta, width, height, dstBpp, true /*metaOnly*/);
}

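// Maps the requested HAL pixel format to the corresponding OMX color format
// and bytes-per-pixel; returns false for unsupported formats.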
//static
bool FrameDecoder::getDstColorFormat(
        android_pixel_format_t colorFormat,
        OMX_COLOR_FORMATTYPE *dstFormat,
        int32_t *dstBpp) {
    switch (colorFormat) {
        case HAL_PIXEL_FORMAT_RGB_565:
        {
            *dstFormat = OMX_COLOR_Format16bitRGB565;
            *dstBpp = 2;
            return true;
        }
        case HAL_PIXEL_FORMAT_RGBA_8888:
        {
            *dstFormat = OMX_COLOR_Format32BitRGBA8888;
            *dstBpp = 4;
            return true;
        }
        case HAL_PIXEL_FORMAT_BGRA_8888:
        {
            *dstFormat = OMX_COLOR_Format32bitBGRA8888;
            *dstBpp = 4;
            return true;
        }
        default:
        {
            ALOGE("Unsupported color format: %d", colorFormat);
            break;
        }
    }
    return false;
}

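// Decodes and returns a single frame at (or near) |frameTimeUs|, converted to
// |colorFormat|; returns NULL on failure. extractFrames() below does the same
// for a run of |numFrames| consecutive frames.
//
// Illustrative usage only (the caller is typically
// StagefrightMetadataRetriever; the constructor arguments shown are an
// assumption based on FrameDecoder.h, not code in this file):
//
//   sp<FrameDecoder> decoder =
//           new VideoFrameDecoder(componentName, trackMeta, source);
//   sp<IMemory> frame = decoder->extractFrame(
//           -1 /* frameTimeUs: use thumbnail time */,
//           MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC /* option */,
//           HAL_PIXEL_FORMAT_RGB_565 /* colorFormat */);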
sp<IMemory> FrameDecoder::extractFrame(
        int64_t frameTimeUs, int option, int colorFormat) {
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
        return NULL;
    }

    status_t err = extractInternal(frameTimeUs, 1, option);
    if (err != OK) {
        return NULL;
    }

    return mFrames.size() > 0 ? mFrames[0] : NULL;
}

status_t FrameDecoder::extractFrames(
        int64_t frameTimeUs, size_t numFrames, int option, int colorFormat,
        std::vector<sp<IMemory> >* frames) {
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
        return ERROR_UNSUPPORTED;
    }

    status_t err = extractInternal(frameTimeUs, numFrames, option);
    if (err != OK) {
        return err;
    }

    for (size_t i = 0; i < mFrames.size(); i++) {
        frames->push_back(mFrames[i]);
    }
    return OK;
}

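// Core decode loop shared by extractFrame() and extractFrames(): sets up the
// decoder with the format and seek options provided by the subclass, then
// alternately queues demuxed input samples and dequeues decoded output
// buffers, routing them through onInputReceived() / onOutputReceived() until
// the subclass reports completion, the input hits EOS, or the dequeue retries
// are exhausted.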
status_t FrameDecoder::extractInternal(
        int64_t frameTimeUs, size_t numFrames, int option) {

    MediaSource::ReadOptions options;
    sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
            frameTimeUs, numFrames, option, &options);
    if (videoFormat == NULL) {
        ALOGE("video format or seek mode not supported");
        return ERROR_UNSUPPORTED;
    }

    status_t err;
    sp<ALooper> looper = new ALooper;
    looper->start();
    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
            looper, mComponentName, &err);
    if (decoder.get() == NULL || err != OK) {
        ALOGW("Failed to instantiate decoder [%s]", mComponentName.c_str());
        return (decoder.get() == NULL) ? NO_MEMORY : err;
    }

    err = decoder->configure(videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
    if (err != OK) {
        ALOGW("configure returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = decoder->start();
    if (err != OK) {
        ALOGW("start returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = mSource->start();
    if (err != OK) {
        ALOGW("source failed to start: %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    Vector<sp<MediaCodecBuffer> > inputBuffers;
    err = decoder->getInputBuffers(&inputBuffers);
    if (err != OK) {
        ALOGW("failed to get input buffers: %d (%s)", err, asString(err));
        decoder->release();
        mSource->stop();
        return err;
    }

    Vector<sp<MediaCodecBuffer> > outputBuffers;
    err = decoder->getOutputBuffers(&outputBuffers);
    if (err != OK) {
        ALOGW("failed to get output buffers: %d (%s)", err, asString(err));
        decoder->release();
        mSource->stop();
        return err;
    }

    sp<AMessage> outputFormat = NULL;
    bool haveMoreInputs = true;
    size_t index, offset, size;
    int64_t timeUs;
    size_t retriesLeft = kRetryCount;
    bool done = false;
    bool firstSample = true;
    do {
        size_t inputIndex = -1;
        int64_t ptsUs = 0ll;
        uint32_t flags = 0;
        sp<MediaCodecBuffer> codecBuffer = NULL;

        // Queue as many inputs as we possibly can, then block on dequeuing
        // outputs. After getting each output, come back and queue the inputs
        // again to keep the decoder busy.
        while (haveMoreInputs) {
            err = decoder->dequeueInputBuffer(&inputIndex, 0);
            if (err != OK) {
                ALOGV("Timed out waiting for input");
                if (retriesLeft) {
                    err = OK;
                }
                break;
            }
            codecBuffer = inputBuffers[inputIndex];

            MediaBufferBase *mediaBuffer = NULL;

            err = mSource->read(&mediaBuffer, &options);
            options.clearSeekTo();
            if (err != OK) {
                ALOGW("Input Error or EOS");
                haveMoreInputs = false;
                if (!firstSample && err == ERROR_END_OF_STREAM) {
                    err = OK;
                }
                break;
            }

            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
                        mediaBuffer->range_length(), codecBuffer->capacity());
                haveMoreInputs = false;
                err = BAD_VALUE;
            } else {
                codecBuffer->setRange(0, mediaBuffer->range_length());

                CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
                memcpy(codecBuffer->data(),
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        mediaBuffer->range_length());

                onInputReceived(codecBuffer, mediaBuffer->meta_data(), firstSample, &flags);
                firstSample = false;
            }

            mediaBuffer->release();

            if (haveMoreInputs && inputIndex < inputBuffers.size()) {
                ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
                        codecBuffer->size(), ptsUs, flags);

                err = decoder->queueInputBuffer(
                        inputIndex,
                        codecBuffer->offset(),
                        codecBuffer->size(),
                        ptsUs,
                        flags);

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    haveMoreInputs = false;
                }
            }
        }

        while (err == OK) {
            // wait for a decoded buffer
            err = decoder->dequeueOutputBuffer(
                    &index,
                    &offset,
                    &size,
                    &timeUs,
                    &flags,
                    kBufferTimeOutUs);

            if (err == INFO_FORMAT_CHANGED) {
                ALOGV("Received format change");
                err = decoder->getOutputFormat(&outputFormat);
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("Output buffers changed");
                err = decoder->getOutputBuffers(&outputBuffers);
            } else {
                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
                    err = OK;
                } else if (err == OK) {
                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
                    // from the extractor, decode to the specified frame. Otherwise we're done.
                    ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
                    sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);

                    err = onOutputReceived(videoFrameBuffer, outputFormat, timeUs, &done);

                    decoder->releaseOutputBuffer(index);
                } else {
                    ALOGW("Received error %d (%s) instead of output", err, asString(err));
                    done = true;
                }
                break;
            }
        }
    } while (err == OK && !done);

    mSource->stop();
    decoder->release();

    if (err != OK) {
        ALOGE("failed to get video frame (err %d)", err);
    }

    return err;
}

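// Video capture setup: validates the seek mode, seeks to |frameTimeUs| (or
// the container's thumbnail time when negative), requests planar YUV420
// output, and hints single input/output buffers when not decoding toward a
// CLOSEST/FRAME_INDEX target.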
sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t numFrames, int seekMode, MediaSource::ReadOptions *options) {
    mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
    if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
            mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
        ALOGE("Unknown seek mode: %d", mSeekMode);
        return NULL;
    }
    mNumFrames = numFrames;

    const char *mime;
    if (!trackMeta()->findCString(kKeyMIMEType, &mime)) {
        ALOGE("Could not find mime type");
        return NULL;
    }

    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);

    if (frameTimeUs < 0) {
        int64_t thumbNailTime;
        if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options->setSeekTo(thumbNailTime, mSeekMode);
    } else {
        options->setSeekTo(frameTimeUs, mSeekMode);
    }

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(trackMeta(), &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    // For the thumbnail extraction case, try to allocate a single buffer on
    // both the input and output ports, if seeking to a sync frame. NOTE: this
    // request may fail if the component requires more buffers for decoding.
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    if (!isSeekingClosest) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

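// Called for each demuxed sample before it is queued to the decoder. Records
// the extractor's target time for CLOSEST seeks; for AVC/HEVC sync-frame
// seeks it flags IDR samples as EOS so only a single frame is decoded.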
status_t VideoFrameDecoder::onInputReceived(
        const sp<MediaCodecBuffer> &codecBuffer,
        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);

    if (firstSample && isSeekingClosest) {
        sampleMeta.findInt64(kKeyTargetTime, &mTargetTimeUs);
        ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
    }

    if (mIsAvcOrHevc && !isSeekingClosest
            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
        // Only need to decode one IDR frame, unless we're seeking with CLOSEST
        // option, in which case we need to actually decode to targetTimeUs.
        *flags |= MediaCodec::BUFFER_FLAG_EOS;
    }
    return OK;
}

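// Converts one decoded buffer into an RGB VideoFrame. Buffers earlier than
// the CLOSEST-seek target time are skipped without conversion; otherwise the
// output crop rectangle is applied and the frame is color-converted into
// newly allocated shared memory.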
status_t VideoFrameDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat,
        int64_t timeUs, bool *done) {
    bool shouldOutput = (mTargetTimeUs < 0ll) || (timeUs >= mTargetTimeUs);

    // If this is not the target frame, skip the color conversion.
    if (!shouldOutput) {
        *done = false;
        return OK;
    }

    *done = (++mNumFramesDecoded >= mNumFrames);

    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height;
    CHECK(outputFormat->findInt32("width", &width));
    CHECK(outputFormat->findInt32("height", &height));

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    sp<IMemory> frameMem = allocVideoFrame(
            trackMeta(),
            (crop_right - crop_left + 1),
            (crop_bottom - crop_top + 1),
            dstBpp());
    addFrame(frameMem);
    VideoFrame* frame = static_cast<VideoFrame*>(frameMem->pointer());

    int32_t srcFormat;
    CHECK(outputFormat->findInt32("color-format", &srcFormat));

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->getFlattenedData(),
                frame->mWidth,
                frame->mHeight,
                crop_left, crop_top, crop_right, crop_bottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
                srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

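// Still-image (HEIF) setup: decodes either the stand-alone thumbnail
// (overriding size and HVCC in the track meta) or the full image, and for
// tiled sources switches the configured size to the tile dimensions and
// records the grid so onOutputReceived() can stitch the tiles together.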
sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t /*numFrames*/,
        int /*seekMode*/, MediaSource::ReadOptions *options) {
    sp<MetaData> overrideMeta;
    mThumbnail = false;
    if (frameTimeUs < 0) {
        uint32_t type;
        const void *data;
        size_t size;
        int32_t thumbWidth, thumbHeight;

        // If we have a stand-alone thumbnail, set up the override meta,
        // and set the seekTo time to -1.
        if (!findThumbnailInfo(trackMeta(),
                &thumbWidth, &thumbHeight, &type, &data, &size)) {
            ALOGE("Thumbnail not available");
            return NULL;
        }
        overrideMeta = new MetaData(*(trackMeta()));
        overrideMeta->remove(kKeyDisplayWidth);
        overrideMeta->remove(kKeyDisplayHeight);
        overrideMeta->setInt32(kKeyWidth, thumbWidth);
        overrideMeta->setInt32(kKeyHeight, thumbHeight);
        overrideMeta->setData(kKeyHVCC, type, data, size);
        options->setSeekTo(-1);
        mThumbnail = true;
    } else {
        options->setSeekTo(frameTimeUs);
    }

    mGridRows = mGridCols = 1;
    if (overrideMeta == NULL) {
        // Check if we're dealing with a tiled HEIF.
        int32_t tileWidth, tileHeight, gridRows, gridCols;
        if (trackMeta()->findInt32(kKeyTileWidth, &tileWidth) && tileWidth > 0
         && trackMeta()->findInt32(kKeyTileHeight, &tileHeight) && tileHeight > 0
         && trackMeta()->findInt32(kKeyGridRows, &gridRows) && gridRows > 0
         && trackMeta()->findInt32(kKeyGridCols, &gridCols) && gridCols > 0) {
            int32_t width, height;
            CHECK(trackMeta()->findInt32(kKeyWidth, &width));
            CHECK(trackMeta()->findInt32(kKeyHeight, &height));

            if (width <= tileWidth * gridCols && height <= tileHeight * gridRows) {
                ALOGV("grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, width, height);

                overrideMeta = new MetaData(*(trackMeta()));
                overrideMeta->setInt32(kKeyWidth, tileWidth);
                overrideMeta->setInt32(kKeyHeight, tileHeight);
                mGridCols = gridCols;
                mGridRows = gridRows;
            } else {
                ALOGE("bad grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, width, height);
            }
        }
        if (overrideMeta == NULL) {
            overrideMeta = trackMeta();
        }
    }

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    if ((mGridRows == 1) && (mGridCols == 1)) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

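// Stitches one decoded tile (or the whole image for non-tiled sources) into
// the destination VideoFrame. Tiles are placed in row-major order across the
// grid, and the right/bottom edges are cropped so partial tiles do not write
// past the image bounds.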
status_t ImageDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height;
    CHECK(outputFormat->findInt32("width", &width));
    CHECK(outputFormat->findInt32("height", &height));

    int32_t imageWidth, imageHeight;
    if (mThumbnail) {
        CHECK(trackMeta()->findInt32(kKeyThumbnailWidth, &imageWidth));
        CHECK(trackMeta()->findInt32(kKeyThumbnailHeight, &imageHeight));
    } else {
        CHECK(trackMeta()->findInt32(kKeyWidth, &imageWidth));
        CHECK(trackMeta()->findInt32(kKeyHeight, &imageHeight));
    }

    if (mFrame == NULL) {
        sp<IMemory> frameMem = allocVideoFrame(trackMeta(), imageWidth, imageHeight, dstBpp());
        mFrame = static_cast<VideoFrame*>(frameMem->pointer());

        addFrame(frameMem);
    }

    int32_t srcFormat;
    CHECK(outputFormat->findInt32("color-format", &srcFormat));

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    int32_t dstLeft, dstTop, dstRight, dstBottom;
    int32_t numTiles = mGridRows * mGridCols;

    dstLeft = mTilesDecoded % mGridCols * width;
    dstTop = mTilesDecoded / mGridCols * height;
    dstRight = dstLeft + width - 1;
    dstBottom = dstTop + height - 1;

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    // apply crop on bottom-right
    // TODO: need to move this into the color converter itself.
    if (dstRight >= imageWidth) {
        crop_right = imageWidth - dstLeft - 1;
        dstRight = dstLeft + crop_right;
    }
    if (dstBottom >= imageHeight) {
        crop_bottom = imageHeight - dstTop - 1;
        dstBottom = dstTop + crop_bottom;
    }

    *done = (++mTilesDecoded >= numTiles);

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                mFrame->getFlattenedData(),
                mFrame->mWidth,
                mFrame->mHeight,
                dstLeft, dstTop, dstRight, dstBottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
                srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

}  // namespace android