1/*
2 * Copyright 2014, The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *     http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "MediaCodecSource"
19#define DEBUG_DRIFT_TIME 0
20
21#include <inttypes.h>
22
23#include <gui/IGraphicBufferConsumer.h>
24#include <gui/IGraphicBufferProducer.h>
25#include <gui/Surface.h>
26#include <media/ICrypto.h>
27#include <media/stagefright/foundation/ABuffer.h>
28#include <media/stagefright/foundation/ADebug.h>
29#include <media/stagefright/foundation/ALooper.h>
30#include <media/stagefright/foundation/AMessage.h>
31#include <media/stagefright/MediaBuffer.h>
32#include <media/stagefright/MediaCodec.h>
33#include <media/stagefright/MediaCodecSource.h>
34#include <media/stagefright/MediaErrors.h>
35#include <media/stagefright/MediaSource.h>
36#include <media/stagefright/MetaData.h>
37#include <media/stagefright/PersistentSurface.h>
38#include <media/stagefright/Utils.h>
39
40namespace android {
41
// Pixel format / dataspace signaled to the source when the encoder turns
// out to be a software codec that reads input buffers on the CPU.
const int kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
const int kDefaultSwVideoEncoderDataSpace = HAL_DATASPACE_BT709;
44
// Pulls buffers from a MediaSource on a dedicated looper thread and hands
// them to MediaCodecSource via the notify message. Running the (blocking)
// MediaSource::read() on its own thread keeps the codec looper responsive.
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    // Starts the source on the puller thread and begins pulling; blocks
    // until the source's start() result is known.
    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    // Stops the source synchronously on the caller's thread, then tells
    // the looper to wind down.
    void stop();

    void pause();
    void resume();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatPause,
        kWhatResume,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;       // carries "accessUnit" back to MediaCodecSource
    sp<ALooper> mLooper;        // dedicated "pull_looper" thread
    int32_t mPullGeneration;    // bumped on stop to invalidate queued pulls
    bool mIsAudio;              // used for log messages only
    bool mPaused;
    bool mReachedEOS;           // EOS notification already posted

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};
81
82MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
83    : mSource(source),
84      mLooper(new ALooper()),
85      mPullGeneration(0),
86      mIsAudio(false),
87      mPaused(false),
88      mReachedEOS(false) {
89    sp<MetaData> meta = source->getFormat();
90    const char *mime;
91    CHECK(meta->findCString(kKeyMIMEType, &mime));
92
93    mIsAudio = !strncasecmp(mime, "audio/", 6);
94
95    mLooper->setName("pull_looper");
96}
97
MediaCodecSource::Puller::~Puller() {
    // Detach from the looper before stopping it so no further messages are
    // delivered to this dying handler.
    mLooper->unregisterHandler(id());
    mLooper->stop();
}
102
103status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
104        const sp<AMessage> &msg) {
105    sp<AMessage> response;
106    status_t err = msg->postAndAwaitResponse(&response);
107
108    if (err != OK) {
109        return err;
110    }
111
112    if (!response->findInt32("err", &err)) {
113        err = OK;
114    }
115
116    return err;
117}
118
119status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
120        const sp<AMessage> &notify) {
121    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
122    mLooper->start(
123            false /* runOnCallingThread */,
124            false /* canCallJava */,
125            PRIORITY_AUDIO);
126    mLooper->registerHandler(this);
127    mNotify = notify;
128
129    sp<AMessage> msg = new AMessage(kWhatStart, this);
130    msg->setObject("meta", meta);
131    return postSynchronouslyAndReturnError(msg);
132}
133
void MediaCodecSource::Puller::stop() {
    // Stop source from caller's thread instead of puller's looper.
    // mSource->stop() is thread-safe, doing it outside the puller's
    // looper allows us to at least stop if source gets stuck.
    // If source gets stuck in read(), the looper would never
    // be able to process the stop(), which could lead to ANR.

    ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
    mSource->stop();
    ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");

    // kWhatStop bumps the pull generation (invalidating queued pulls) and
    // posts the EOS notification if it hasn't gone out yet.
    (new AMessage(kWhatStop, this))->post();
}
147
148void MediaCodecSource::Puller::pause() {
149    (new AMessage(kWhatPause, this))->post();
150}
151
152void MediaCodecSource::Puller::resume() {
153    (new AMessage(kWhatResume, this))->post();
154}
155
156void MediaCodecSource::Puller::schedulePull() {
157    sp<AMessage> msg = new AMessage(kWhatPull, this);
158    msg->setInt32("generation", mPullGeneration);
159    msg->post();
160}
161
162void MediaCodecSource::Puller::handleEOS() {
163    if (!mReachedEOS) {
164        ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
165        mReachedEOS = true;
166        sp<AMessage> notify = mNotify->dup();
167        notify->setPointer("accessUnit", NULL);
168        notify->post();
169    }
170}
171
void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            // Start the source on this (puller) thread, kick off the first
            // pull on success, and reply with the start result.
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            mReachedEOS = false;

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            // Bumping the generation invalidates any kWhatPull messages
            // still queued, then EOS is posted (if not already).
            ++mPullGeneration;

            handleEOS();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                // Stale pull queued before a stop(); ignore it.
                break;
            }

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                // While paused, keep reading (to drain the source) but drop
                // the data and re-post to keep the pull loop alive.
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                msg->post();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                handleEOS();
            } else {
                // Forward the buffer to MediaCodecSource and schedule the
                // next pull by re-posting this same message.
                sp<AMessage> notify = mNotify->dup();

                notify->setPointer("accessUnit", mbuf);
                notify->post();

                msg->post();
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }

        default:
            TRESPASS();
    }
}
260
261// static
262sp<MediaCodecSource> MediaCodecSource::Create(
263        const sp<ALooper> &looper,
264        const sp<AMessage> &format,
265        const sp<MediaSource> &source,
266        const sp<IGraphicBufferConsumer> &consumer,
267        uint32_t flags) {
268    sp<MediaCodecSource> mediaSource =
269            new MediaCodecSource(looper, format, source, consumer, flags);
270
271    if (mediaSource->init() == OK) {
272        return mediaSource;
273    }
274    return NULL;
275}
276
277status_t MediaCodecSource::start(MetaData* params) {
278    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
279    msg->setObject("meta", params);
280    return postSynchronouslyAndReturnError(msg);
281}
282
status_t MediaCodecSource::stop() {
    // Stop the encoder first (synchronously, on our looper).
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    status_t err = postSynchronouslyAndReturnError(msg);

    // mPuller->stop() needs to be done outside MediaCodecSource's looper,
    // as it contains a synchronous call to stop the underlying MediaSource,
    // which often waits for all outstanding MediaBuffers to return, but
    // MediaBuffers are only returned when MediaCodecSource looper gets
    // to process them.

    if (mPuller != NULL) {
        ALOGI("puller (%s) stopping", mIsVideo ? "video" : "audio");
        mPuller->stop();
        ALOGI("puller (%s) stopped", mIsVideo ? "video" : "audio");
    }

    return err;
}
301
302status_t MediaCodecSource::pause() {
303    (new AMessage(kWhatPause, mReflector))->post();
304    return OK;
305}
306
// Returns the encoder input surface's producer side; only valid when this
// source was created with FLAG_USE_SURFACE_INPUT.
sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}
311
// Blocks until an encoded buffer is available, or the encoder has reached
// EOS/error. On OK, ownership of *buffer passes to the caller, who gives
// it back via signalBufferReturned().
status_t MediaCodecSource::read(
        MediaBuffer** buffer, const ReadOptions* /* options */) {
    Mutex::Autolock autolock(mOutputBufferLock);

    *buffer = NULL;
    while (mOutputBufferQueue.size() == 0 && !mEncoderReachedEOS) {
        mOutputBufferCond.wait(mOutputBufferLock);
    }
    if (!mEncoderReachedEOS) {
        *buffer = *mOutputBufferQueue.begin();
        mOutputBufferQueue.erase(mOutputBufferQueue.begin());
        return OK;
    }
    // EOS or error: signalEOS() already drained the queue; report why.
    return mErrorCode;
}
327
// Called when a reader is done with a buffer handed out by read(): detach
// ourselves as observer and drop the reference taken when it was queued.
void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}
332
MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<IGraphicBufferConsumer> &consumer,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mSetEncoderFormat(false),
      mEncoderFormat(0),
      mEncoderDataSpace(0),
      mGraphicBufferConsumer(consumer),
      mFirstSampleTimeUs(-1ll),
      mEncoderReachedEOS(false),
      mErrorCode(OK) {
    CHECK(mLooper != NULL);

    // Video vs audio is derived from the requested output mime type.
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    // With surface input the codec pulls frames itself; a Puller is only
    // needed when we must feed buffers from a MediaSource.
    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}
367
MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    // Stop the codec's private looper, then detach our reflector from the
    // shared looper so it can't deliver to a dead object.
    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}
374
375status_t MediaCodecSource::init() {
376    status_t err = initEncoder();
377
378    if (err != OK) {
379        releaseEncoder();
380    }
381
382    return err;
383}
384
// Creates and configures the encoder MediaCodec on its own looper, sets up
// surface input if requested, probes whether the codec wants SW-readable
// input buffers, and starts the codec. Returns the first failing step's
// error; caller (init()) releases on failure.
status_t MediaCodecSource::initEncoder() {
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    // The codec gets its own looper so its callbacks don't contend with
    // our message handling.
    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_METADATA_INPUT) {
        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // Start with input suspended; onStart()/resume() lifts it.
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    mEncoder = MediaCodec::CreateByType(
            mCodecLooper, outputMIME.c_str(), true /* encoder */);

    if (mEncoder == NULL) {
        return NO_INIT;
    }

    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

    // Use the async callback API; activity arrives as kWhatEncoderActivity.
    mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
    mEncoder->setCallback(mEncoderActivityNotify);

    status_t err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);

    if (err != OK) {
        return err;
    }

    // Re-read the (possibly codec-amended) output format and mirror it
    // into the MetaData exposed via getFormat().
    mEncoder->getOutputFormat(&mOutputFormat);
    convertMessageToMetaData(mOutputFormat, mMeta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mGraphicBufferConsumer != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(
                    new PersistentSurface(NULL, mGraphicBufferConsumer));
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    // If the codec is a SW encoder that reads buffers on the CPU, remember
    // to ask the source for SW-readable buffers when it is started.
    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK
            && inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
            && usingSwReadOften) {
        // this is a SW encoder; signal source to allocate SW readable buffers
        mSetEncoderFormat = true;
        mEncoderFormat = kDefaultSwVideoEncoderFormat;
        mEncoderDataSpace = kDefaultSwVideoEncoderDataSpace;
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    mEncoderReachedEOS = false;
    mErrorCode = OK;

    return OK;
}
470
471void MediaCodecSource::releaseEncoder() {
472    if (mEncoder == NULL) {
473        return;
474    }
475
476    mEncoder->release();
477    mEncoder.clear();
478
479    while (!mInputBufferQueue.empty()) {
480        MediaBuffer *mbuf = *mInputBufferQueue.begin();
481        mInputBufferQueue.erase(mInputBufferQueue.begin());
482        if (mbuf != NULL) {
483            mbuf->release();
484        }
485    }
486}
487
488status_t MediaCodecSource::postSynchronouslyAndReturnError(
489        const sp<AMessage> &msg) {
490    sp<AMessage> response;
491    status_t err = msg->postAndAwaitResponse(&response);
492
493    if (err != OK) {
494        return err;
495    }
496
497    if (!response->findInt32("err", &err)) {
498        err = OK;
499    }
500
501    return err;
502}
503
// Marks the encoder as having reached EOS (with |err| as the terminal
// status surfaced by read()), wakes any blocked reader, releases the
// encoder, and replies to every pending stop() request.
void MediaCodecSource::signalEOS(status_t err) {
    if (!mEncoderReachedEOS) {
        ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
        {
            Mutex::Autolock autoLock(mOutputBufferLock);
            // release all unread media buffers
            for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
                    it != mOutputBufferQueue.end(); it++) {
                (*it)->release();
            }
            mOutputBufferQueue.clear();
            mEncoderReachedEOS = true;
            mErrorCode = err;
            // Unblock read(), which will now return mErrorCode.
            mOutputBufferCond.signal();
        }

        releaseEncoder();
    }
    if (mStopping && mEncoderReachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<sp<AReplyToken>>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
    }
}
534
535void MediaCodecSource::suspend() {
536    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
537    if (mEncoder != NULL) {
538        sp<AMessage> params = new AMessage;
539        params->setInt32("drop-input-frames", true);
540        mEncoder->setParameters(params);
541    }
542}
543
544void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
545    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
546    if (mEncoder != NULL) {
547        sp<AMessage> params = new AMessage;
548        params->setInt32("drop-input-frames", false);
549        if (skipFramesBeforeUs > 0) {
550            params->setInt64("skip-frames-before", skipFramesBeforeUs);
551        }
552        mEncoder->setParameters(params);
553    }
554}
555
// Moves queued source buffers into available encoder input slots. A NULL
// queue entry is the EOS marker and is translated to BUFFER_FLAG_EOS.
// Returns the first queueInputBuffer() failure, otherwise OK.
status_t MediaCodecSource::feedEncoderInputBuffers() {
    while (!mInputBufferQueue.empty()
            && !mAvailEncoderInputIndices.empty()) {
        MediaBuffer* mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<ABuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                // Encoder is in a bad state; drop the buffer and bail out
                // with EOS so readers are unblocked.
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            // NULL entry == end of stream.
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
623
// Handles kWhatStart on our looper: a first start spins up the input path
// (surface resume or puller start); a start while already running acts as
// a resume. Fails if a stop is in flight.
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        // Already running: treat this start as resume-from-pause.
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // Surface input: just lift the suspended state, skipping frames
        // stamped before the requested start time (if any).
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            // SW encoder wants CPU-readable buffers; tell the source.
            if (meta == NULL) {
                meta = new MetaData;
            }
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}
674
675void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
676    switch (msg->what()) {
677    case kWhatPullerNotify:
678    {
679        MediaBuffer *mbuf;
680        CHECK(msg->findPointer("accessUnit", (void**)&mbuf));
681
682        if (mbuf == NULL) {
683            ALOGV("puller (%s) reached EOS",
684                    mIsVideo ? "video" : "audio");
685            signalEOS();
686        }
687
688        if (mEncoder == NULL) {
689            ALOGV("got msg '%s' after encoder shutdown.",
690                  msg->debugString().c_str());
691
692            if (mbuf != NULL) {
693                mbuf->release();
694            }
695
696            break;
697        }
698
699        mInputBufferQueue.push_back(mbuf);
700
701        feedEncoderInputBuffers();
702
703        break;
704    }
705    case kWhatEncoderActivity:
706    {
707        if (mEncoder == NULL) {
708            break;
709        }
710
711        int32_t cbID;
712        CHECK(msg->findInt32("callbackID", &cbID));
713        if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
714            int32_t index;
715            CHECK(msg->findInt32("index", &index));
716
717            mAvailEncoderInputIndices.push_back(index);
718            feedEncoderInputBuffers();
719        } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
720            int32_t index;
721            size_t offset;
722            size_t size;
723            int64_t timeUs;
724            int32_t flags;
725
726            CHECK(msg->findInt32("index", &index));
727            CHECK(msg->findSize("offset", &offset));
728            CHECK(msg->findSize("size", &size));
729            CHECK(msg->findInt64("timeUs", &timeUs));
730            CHECK(msg->findInt32("flags", &flags));
731
732            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
733                mEncoder->releaseOutputBuffer(index);
734                signalEOS();
735                break;
736            }
737
738            sp<ABuffer> outbuf;
739            status_t err = mEncoder->getOutputBuffer(index, &outbuf);
740            if (err != OK || outbuf == NULL) {
741                signalEOS();
742                break;
743            }
744
745            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
746            memcpy(mbuf->data(), outbuf->data(), outbuf->size());
747
748            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
749                if (mIsVideo) {
750                    int64_t decodingTimeUs;
751                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
752                        // GraphicBufferSource is supposed to discard samples
753                        // queued before start, and offset timeUs by start time
754                        CHECK_GE(timeUs, 0ll);
755                        // TODO:
756                        // Decoding time for surface source is unavailable,
757                        // use presentation time for now. May need to move
758                        // this logic into MediaCodec.
759                        decodingTimeUs = timeUs;
760                    } else {
761                        CHECK(!mDecodingTimeQueue.empty());
762                        decodingTimeUs = *(mDecodingTimeQueue.begin());
763                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
764                    }
765                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
766
767                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
768                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
769                } else {
770                    int64_t driftTimeUs = 0;
771#if DEBUG_DRIFT_TIME
772                    CHECK(!mDriftTimeQueue.empty());
773                    driftTimeUs = *(mDriftTimeQueue.begin());
774                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
775                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
776#endif // DEBUG_DRIFT_TIME
777                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
778                            timeUs, timeUs / 1E6, driftTimeUs);
779                }
780                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
781            } else {
782                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
783            }
784            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
785                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
786            }
787            mbuf->setObserver(this);
788            mbuf->add_ref();
789
790            {
791                Mutex::Autolock autoLock(mOutputBufferLock);
792                mOutputBufferQueue.push_back(mbuf);
793                mOutputBufferCond.signal();
794            }
795
796            mEncoder->releaseOutputBuffer(index);
797       } else if (cbID == MediaCodec::CB_ERROR) {
798            status_t err;
799            CHECK(msg->findInt32("err", &err));
800            ALOGE("Encoder (%s) reported error : 0x%x",
801                    mIsVideo ? "video" : "audio", err);
802            signalEOS();
803       }
804       break;
805    }
806    case kWhatStart:
807    {
808        sp<AReplyToken> replyID;
809        CHECK(msg->senderAwaitsResponse(&replyID));
810
811        sp<RefBase> obj;
812        CHECK(msg->findObject("meta", &obj));
813        MetaData *params = static_cast<MetaData *>(obj.get());
814
815        sp<AMessage> response = new AMessage;
816        response->setInt32("err", onStart(params));
817        response->postReply(replyID);
818        break;
819    }
820    case kWhatStop:
821    {
822        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");
823
824        sp<AReplyToken> replyID;
825        CHECK(msg->senderAwaitsResponse(&replyID));
826
827        if (mEncoderReachedEOS) {
828            // if we already reached EOS, reply and return now
829            ALOGI("encoder (%s) already stopped",
830                    mIsVideo ? "video" : "audio");
831            (new AMessage)->postReply(replyID);
832            break;
833        }
834
835        mStopReplyIDQueue.push_back(replyID);
836        if (mStopping) {
837            // nothing to do if we're already stopping, reply will be posted
838            // to all when we're stopped.
839            break;
840        }
841
842        mStopping = true;
843
844        // if using surface, signal source EOS and wait for EOS to come back.
845        // otherwise, release encoder and post EOS if haven't done already
846        if (mFlags & FLAG_USE_SURFACE_INPUT) {
847            mEncoder->signalEndOfInputStream();
848        } else {
849            signalEOS();
850        }
851        break;
852    }
853    case kWhatPause:
854    {
855        if (mFlags && FLAG_USE_SURFACE_INPUT) {
856            suspend();
857        } else {
858            CHECK(mPuller != NULL);
859            mPuller->pause();
860        }
861        break;
862    }
863    default:
864        TRESPASS();
865    }
866}
867
868} // namespace android
869