MediaCodecSource.cpp revision 16fcc47c113e63efa69f5af5decf1ad46ec653a9
/*
 * Copyright 2014, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaCodecSource"
#define DEBUG_DRIFT_TIME 0

#include <inttypes.h>

#include <gui/IGraphicBufferConsumer.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/Utils.h>

namespace android {

const int kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
const int kDefaultSwVideoEncoderDataSpace = HAL_DATASPACE_BT709;

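// Puller pulls access units from the (non-surface) MediaSource on its own
// "pull_looper" thread and posts each buffer to MediaCodecSource through the
// notify message; a NULL "accessUnit" pointer signals end of stream.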
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    void stop();

    void pause();
    void resume();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatPause,
        kWhatResume,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;
    sp<ALooper> mLooper;
    int32_t mPullGeneration;
    bool mIsAudio;
    bool mPaused;
    bool mReachedEOS;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};

MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
    : mSource(source),
      mLooper(new ALooper()),
      mPullGeneration(0),
      mIsAudio(false),
      mPaused(false),
      mReachedEOS(false) {
    sp<MetaData> meta = source->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    mIsAudio = !strncasecmp(mime, "audio/", 6);

    mLooper->setName("pull_looper");
}

MediaCodecSource::Puller::~Puller() {
    mLooper->unregisterHandler(id());
    mLooper->stop();
}

status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
        const sp<AMessage> &notify) {
    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
    mLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);
    mLooper->registerHandler(this);
    mNotify = notify;

    sp<AMessage> msg = new AMessage(kWhatStart, this);
    msg->setObject("meta", meta);
    return postSynchronouslyAndReturnError(msg);
}

void MediaCodecSource::Puller::stop() {
    // Stop the source from the caller's thread instead of the puller's looper.
    // mSource->stop() is thread-safe; calling it outside the puller's looper
    // lets us stop even if the source gets stuck. If the source gets stuck in
    // read(), the looper would never get to process the stop(), which could
    // lead to an ANR.

    ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
    mSource->stop();
    ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");

    (new AMessage(kWhatStop, this))->post();
}

void MediaCodecSource::Puller::pause() {
    (new AMessage(kWhatPause, this))->post();
}

void MediaCodecSource::Puller::resume() {
    (new AMessage(kWhatResume, this))->post();
}

void MediaCodecSource::Puller::schedulePull() {
    sp<AMessage> msg = new AMessage(kWhatPull, this);
    msg->setInt32("generation", mPullGeneration);
    msg->post();
}

void MediaCodecSource::Puller::handleEOS() {
    if (!mReachedEOS) {
        ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
        mReachedEOS = true;
        sp<AMessage> notify = mNotify->dup();
        notify->setPointer("accessUnit", NULL);
        notify->post();
    }
}

void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            mReachedEOS = false;

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            ++mPullGeneration;

            handleEOS();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                break;
            }

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                msg->post();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                handleEOS();
            } else {
                sp<AMessage> notify = mNotify->dup();

                notify->setPointer("accessUnit", mbuf);
                notify->post();

                msg->post();
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }

        default:
            TRESPASS();
    }
}

// static
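// Creates a MediaCodecSource for the given output format and source (or
// persistent-surface consumer) and initializes its encoder; returns NULL on
// any initialization failure.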
sp<MediaCodecSource> MediaCodecSource::Create(
        const sp<ALooper> &looper,
        const sp<AMessage> &format,
        const sp<MediaSource> &source,
        const sp<IGraphicBufferConsumer> &consumer,
        uint32_t flags) {
    sp<MediaCodecSource> mediaSource =
            new MediaCodecSource(looper, format, source, consumer, flags);

    if (mediaSource->init() == OK) {
        return mediaSource;
    }
    return NULL;
}

void MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
    sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
    msg->setInt64("time-offset-us", timeOffsetUs);
    postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
    msg->setObject("meta", params);
    return postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    status_t err = postSynchronouslyAndReturnError(msg);

    // mPuller->stop() needs to be done outside MediaCodecSource's looper,
    // as it contains a synchronous call to stop the underlying MediaSource,
    // which often waits for all outstanding MediaBuffers to return, but
    // MediaBuffers are only returned when MediaCodecSource looper gets
    // to process them.

    if (mPuller != NULL) {
        ALOGI("puller (%s) stopping", mIsVideo ? "video" : "audio");
        mPuller->stop();
        ALOGI("puller (%s) stopped", mIsVideo ? "video" : "audio");
    }

    return err;
}

status_t MediaCodecSource::pause() {
    (new AMessage(kWhatPause, mReflector))->post();
    return OK;
}

sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}

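// Blocks the caller until an encoded buffer is available or the encoder has
// reached EOS; after EOS, returns the error code recorded by signalEOS().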
status_t MediaCodecSource::read(
        MediaBuffer** buffer, const ReadOptions* /* options */) {
    Mutex::Autolock autolock(mOutputBufferLock);

    *buffer = NULL;
    while (mOutputBufferQueue.size() == 0 && !mEncoderReachedEOS) {
        mOutputBufferCond.wait(mOutputBufferLock);
    }
    if (!mEncoderReachedEOS) {
        *buffer = *mOutputBufferQueue.begin();
        mOutputBufferQueue.erase(mOutputBufferQueue.begin());
        return OK;
    }
    return mErrorCode;
}

void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}

MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<IGraphicBufferConsumer> &consumer,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mSetEncoderFormat(false),
      mEncoderFormat(0),
      mEncoderDataSpace(0),
      mGraphicBufferConsumer(consumer),
      mInputBufferTimeOffsetUs(0),
      mFirstSampleTimeUs(-1ll),
      mEncoderReachedEOS(false),
      mErrorCode(OK) {
    CHECK(mLooper != NULL);

    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}

MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}

status_t MediaCodecSource::init() {
    status_t err = initEncoder();

    if (err != OK) {
        releaseEncoder();
    }

    return err;
}

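// Selects a matching encoder for the output MIME type, configures it in
// encode mode with asynchronous callbacks, sets up the input surface when
// FLAG_USE_SURFACE_INPUT is used, and starts the codec.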
status_t MediaCodecSource::initEncoder() {
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_METADATA_INPUT) {
        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    Vector<AString> matchingCodecs;
    MediaCodecList::findMatchingCodecs(
            outputMIME.c_str(), true /* encoder */,
            ((mFlags & FLAG_PREFER_SOFTWARE_CODEC) ? MediaCodecList::kPreferSoftwareCodecs : 0),
            &matchingCodecs);

    status_t err = NO_INIT;
    for (size_t ix = 0; ix < matchingCodecs.size(); ++ix) {
        mEncoder = MediaCodec::CreateByComponentName(
                mCodecLooper, matchingCodecs[ix]);

        if (mEncoder == NULL) {
            continue;
        }

        ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
        mEncoder->setCallback(mEncoderActivityNotify);

        err = mEncoder->configure(
                    mOutputFormat,
                    NULL /* nativeWindow */,
                    NULL /* crypto */,
                    MediaCodec::CONFIGURE_FLAG_ENCODE);

        if (err == OK) {
            break;
        }
        mEncoder->release();
        mEncoder = NULL;
    }

    if (err != OK) {
        return err;
    }

    mEncoder->getOutputFormat(&mOutputFormat);
    convertMessageToMetaData(mOutputFormat, mMeta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mGraphicBufferConsumer != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(
                    new PersistentSurface(NULL, mGraphicBufferConsumer));
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK
            && inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
            && usingSwReadOften) {
        // this is a SW encoder; signal source to allocate SW readable buffers
        mSetEncoderFormat = true;
        mEncoderFormat = kDefaultSwVideoEncoderFormat;
        mEncoderDataSpace = kDefaultSwVideoEncoderDataSpace;
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    mEncoderReachedEOS = false;
    mErrorCode = OK;

    return OK;
}

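// Releases the MediaCodec and drops any source buffers still queued for
// encoding.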
void MediaCodecSource::releaseEncoder() {
    if (mEncoder == NULL) {
        return;
    }

    mEncoder->release();
    mEncoder.clear();

    while (!mInputBufferQueue.empty()) {
        MediaBuffer *mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());
        if (mbuf != NULL) {
            mbuf->release();
        }
    }
}

status_t MediaCodecSource::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

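// Records EOS (or an error), releases unread output buffers, wakes any reader
// blocked in read(), and replies to every pending stop() request.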
void MediaCodecSource::signalEOS(status_t err) {
    if (!mEncoderReachedEOS) {
        ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
        {
            Mutex::Autolock autoLock(mOutputBufferLock);
            // release all unread media buffers
            for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
                    it != mOutputBufferQueue.end(); it++) {
                (*it)->release();
            }
            mOutputBufferQueue.clear();
            mEncoderReachedEOS = true;
            mErrorCode = err;
            mOutputBufferCond.signal();
        }

        releaseEncoder();
    }
    if (mStopping && mEncoderReachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<sp<AReplyToken>>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
    }
}

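// suspend()/resume() toggle "drop-input-frames" on the encoder and are only
// meaningful for surface input (FLAG_USE_SURFACE_INPUT).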
void MediaCodecSource::suspend() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", true);
        mEncoder->setParameters(params);
    }
}

void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", false);
        if (skipFramesBeforeUs > 0) {
            params->setInt64("skip-frames-before", skipFramesBeforeUs);
        }
        mEncoder->setParameters(params);
    }
}

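// Copies queued source buffers into available encoder input buffers; records
// decoding time for video (and drift time for audio when DEBUG_DRIFT_TIME is
// enabled), and turns a NULL queue entry into an EOS input buffer.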
status_t MediaCodecSource::feedEncoderInputBuffers() {
    while (!mInputBufferQueue.empty()
            && !mAvailEncoderInputIndices.empty()) {
        MediaBuffer* mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<ABuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}

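// Handles kWhatStart on the looper: resumes the source if already started,
// otherwise starts the puller (or un-suspends the input surface), passing the
// software-encoder pixel format and data space when one was selected.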
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            if (meta == NULL) {
                meta = new MetaData;
            }
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}

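// All state changes, puller notifications and encoder callbacks are
// serialized here on mLooper via mReflector.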
void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
    case kWhatPullerNotify:
    {
        MediaBuffer *mbuf;
        CHECK(msg->findPointer("accessUnit", (void**)&mbuf));

        if (mbuf == NULL) {
            ALOGV("puller (%s) reached EOS",
                    mIsVideo ? "video" : "audio");
            signalEOS();
        }

        if (mEncoder == NULL) {
            ALOGV("got msg '%s' after encoder shutdown.",
                  msg->debugString().c_str());

            if (mbuf != NULL) {
                mbuf->release();
            }

            break;
        }

        mInputBufferQueue.push_back(mbuf);

        feedEncoderInputBuffers();

        break;
    }
    case kWhatEncoderActivity:
    {
        if (mEncoder == NULL) {
            break;
        }

        int32_t cbID;
        CHECK(msg->findInt32("callbackID", &cbID));
        if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
            int32_t index;
            CHECK(msg->findInt32("index", &index));

            mAvailEncoderInputIndices.push_back(index);
            feedEncoderInputBuffers();
        } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
            int32_t index;
            size_t offset;
            size_t size;
            int64_t timeUs;
            int32_t flags;

            CHECK(msg->findInt32("index", &index));
            CHECK(msg->findSize("offset", &offset));
            CHECK(msg->findSize("size", &size));
            CHECK(msg->findInt64("timeUs", &timeUs));
            CHECK(msg->findInt32("flags", &flags));

            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                mEncoder->releaseOutputBuffer(index);
                signalEOS();
                break;
            }

            sp<ABuffer> outbuf;
            status_t err = mEncoder->getOutputBuffer(index, &outbuf);
            if (err != OK || outbuf == NULL) {
                signalEOS();
                break;
            }

            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());

            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                if (mIsVideo) {
                    int64_t decodingTimeUs;
                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
                        // Time offset is not applied at
                        // feedEncoderInputBuffer() in surface input case.
                        timeUs += mInputBufferTimeOffsetUs;
                        // GraphicBufferSource is supposed to discard samples
                        // queued before start, and offset timeUs by start time
                        CHECK_GE(timeUs, 0ll);
                        // TODO:
                        // Decoding time for surface source is unavailable,
                        // use presentation time for now. May need to move
                        // this logic into MediaCodec.
                        decodingTimeUs = timeUs;
                    } else {
                        CHECK(!mDecodingTimeQueue.empty());
                        decodingTimeUs = *(mDecodingTimeQueue.begin());
                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                    }
                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);

                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                } else {
                    int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                    CHECK(!mDriftTimeQueue.empty());
                    driftTimeUs = *(mDriftTimeQueue.begin());
                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                            timeUs, timeUs / 1E6, driftTimeUs);
                }
                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
            } else {
                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
            }
            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
            }
            mbuf->setObserver(this);
            mbuf->add_ref();

            {
                Mutex::Autolock autoLock(mOutputBufferLock);
                mOutputBufferQueue.push_back(mbuf);
                mOutputBufferCond.signal();
            }

            mEncoder->releaseOutputBuffer(index);
        } else if (cbID == MediaCodec::CB_ERROR) {
            status_t err;
            CHECK(msg->findInt32("err", &err));
            ALOGE("Encoder (%s) reported error : 0x%x",
                    mIsVideo ? "video" : "audio", err);
            signalEOS();
        }
        break;
    }
    case kWhatStart:
    {
        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        sp<RefBase> obj;
        CHECK(msg->findObject("meta", &obj));
        MetaData *params = static_cast<MetaData *>(obj.get());

        sp<AMessage> response = new AMessage;
        response->setInt32("err", onStart(params));
        response->postReply(replyID);
        break;
    }
    case kWhatStop:
    {
        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        if (mEncoderReachedEOS) {
            // if we already reached EOS, reply and return now
            ALOGI("encoder (%s) already stopped",
                    mIsVideo ? "video" : "audio");
            (new AMessage)->postReply(replyID);
            break;
        }

        mStopReplyIDQueue.push_back(replyID);
        if (mStopping) {
            // nothing to do if we're already stopping, reply will be posted
            // to all when we're stopped.
            break;
        }

        mStopping = true;

        // if using surface, signal source EOS and wait for EOS to come back.
        // otherwise, release encoder and post EOS if haven't done already
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            mEncoder->signalEndOfInputStream();
        } else {
            signalEOS();
        }
        break;
    }
    case kWhatPause:
    {
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            suspend();
        } else {
            CHECK(mPuller != NULL);
            mPuller->pause();
        }
        break;
    }
    case kWhatSetInputBufferTimeOffset:
    {
        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));

        sp<AMessage> response = new AMessage;
        response->postReply(replyID);
        break;
    }
    default:
        TRESPASS();
    }
}

} // namespace android