MediaCodecSource.cpp revision d008275796ac4cccf85fefce53cef733a49bc1fa
1/*
2 * Copyright 2014, The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *     http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "MediaCodecSource"
19#define DEBUG_DRIFT_TIME 0
20
21#include <inttypes.h>
22
23#include <gui/IGraphicBufferConsumer.h>
24#include <gui/IGraphicBufferProducer.h>
25#include <gui/Surface.h>
26#include <media/ICrypto.h>
27#include <media/stagefright/foundation/ABuffer.h>
28#include <media/stagefright/foundation/ADebug.h>
29#include <media/stagefright/foundation/ALooper.h>
30#include <media/stagefright/foundation/AMessage.h>
31#include <media/stagefright/MediaBuffer.h>
32#include <media/stagefright/MediaCodec.h>
33#include <media/stagefright/MediaCodecSource.h>
34#include <media/stagefright/MediaErrors.h>
35#include <media/stagefright/MediaSource.h>
36#include <media/stagefright/MetaData.h>
37#include <media/stagefright/PersistentSurface.h>
38#include <media/stagefright/Utils.h>
39
40namespace android {
41
// Defaults handed to the video source when the encoder is a software codec
// that reads input buffers on the CPU (see initEncoder's
// "using-sw-read-often" probe): a CPU-readable pixel format and BT.709.
const int kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
const int kDefaultSwVideoEncoderDataSpace = HAL_DATASPACE_BT709;
44
// Puller reads MediaBuffers from an upstream MediaSource on its own
// dedicated looper thread ("pull_looper") and forwards each buffer — or an
// EOS marker — to MediaCodecSource through the notify message given to
// start(). It exists only when the codec is NOT fed via a surface.
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    // Starts the source and begins the pull loop; blocks until the source's
    // start() has returned. |notify| is posted once per buffer with an
    // "accessUnit" pointer (NULL pointer == EOS).
    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    // Stops the source synchronously on the caller's thread, then stops the
    // pull loop asynchronously (see comment in the implementation).
    void stop();

    // While paused, buffers are still pulled (so the source doesn't stall)
    // but are released instead of being forwarded.
    void pause();
    void resume();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatPause,
        kWhatResume,
    };

    sp<MediaSource> mSource;   // upstream source buffers are read from
    sp<AMessage> mNotify;      // posted to MediaCodecSource per buffer / EOS
    sp<ALooper> mLooper;       // dedicated pull thread
    int32_t mPullGeneration;   // bumped on stop to cancel in-flight pulls
    bool mIsAudio;             // true when source MIME is audio/*
    bool mPaused;              // when true, pulled buffers are dropped
    bool mReachedEOS;          // EOS already posted to mNotify

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};
81
82MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
83    : mSource(source),
84      mLooper(new ALooper()),
85      mPullGeneration(0),
86      mIsAudio(false),
87      mPaused(false),
88      mReachedEOS(false) {
89    sp<MetaData> meta = source->getFormat();
90    const char *mime;
91    CHECK(meta->findCString(kKeyMIMEType, &mime));
92
93    mIsAudio = !strncasecmp(mime, "audio/", 6);
94
95    mLooper->setName("pull_looper");
96}
97
// Detach this handler from the pull looper and shut the looper down.
MediaCodecSource::Puller::~Puller() {
    mLooper->unregisterHandler(id());
    mLooper->stop();
}
102
103status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
104        const sp<AMessage> &msg) {
105    sp<AMessage> response;
106    status_t err = msg->postAndAwaitResponse(&response);
107
108    if (err != OK) {
109        return err;
110    }
111
112    if (!response->findInt32("err", &err)) {
113        err = OK;
114    }
115
116    return err;
117}
118
119status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
120        const sp<AMessage> &notify) {
121    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
122    mLooper->start(
123            false /* runOnCallingThread */,
124            false /* canCallJava */,
125            PRIORITY_AUDIO);
126    mLooper->registerHandler(this);
127    mNotify = notify;
128
129    sp<AMessage> msg = new AMessage(kWhatStart, this);
130    msg->setObject("meta", meta);
131    return postSynchronouslyAndReturnError(msg);
132}
133
void MediaCodecSource::Puller::stop() {
    // Stop source from caller's thread instead of puller's looper.
    // mSource->stop() is thread-safe, doing it outside the puller's
    // looper allows us to at least stop if source gets stuck.
    // If source gets stuck in read(), the looper would never
    // be able to process the stop(), which could lead to ANR.

    ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
    mSource->stop();
    ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");

    // Asynchronously cancel the pull loop and post EOS to the notify target.
    (new AMessage(kWhatStop, this))->post();
}
147
148void MediaCodecSource::Puller::pause() {
149    (new AMessage(kWhatPause, this))->post();
150}
151
152void MediaCodecSource::Puller::resume() {
153    (new AMessage(kWhatResume, this))->post();
154}
155
156void MediaCodecSource::Puller::schedulePull() {
157    sp<AMessage> msg = new AMessage(kWhatPull, this);
158    msg->setInt32("generation", mPullGeneration);
159    msg->post();
160}
161
162void MediaCodecSource::Puller::handleEOS() {
163    if (!mReachedEOS) {
164        ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
165        mReachedEOS = true;
166        sp<AMessage> notify = mNotify->dup();
167        notify->setPointer("accessUnit", NULL);
168        notify->post();
169    }
170}
171
172void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
173    switch (msg->what()) {
174        case kWhatStart:
175        {
176            sp<RefBase> obj;
177            CHECK(msg->findObject("meta", &obj));
178
179            mReachedEOS = false;
180
181            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));
182
183            if (err == OK) {
184                schedulePull();
185            }
186
187            sp<AMessage> response = new AMessage;
188            response->setInt32("err", err);
189
190            sp<AReplyToken> replyID;
191            CHECK(msg->senderAwaitsResponse(&replyID));
192            response->postReply(replyID);
193            break;
194        }
195
196        case kWhatStop:
197        {
198            ++mPullGeneration;
199
200            handleEOS();
201            break;
202        }
203
204        case kWhatPull:
205        {
206            int32_t generation;
207            CHECK(msg->findInt32("generation", &generation));
208
209            if (generation != mPullGeneration) {
210                break;
211            }
212
213            MediaBuffer *mbuf;
214            status_t err = mSource->read(&mbuf);
215
216            if (mPaused) {
217                if (err == OK) {
218                    mbuf->release();
219                    mbuf = NULL;
220                }
221
222                msg->post();
223                break;
224            }
225
226            if (err != OK) {
227                if (err == ERROR_END_OF_STREAM) {
228                    ALOGV("stream ended, mbuf %p", mbuf);
229                } else {
230                    ALOGE("error %d reading stream.", err);
231                }
232                handleEOS();
233            } else {
234                sp<AMessage> notify = mNotify->dup();
235
236                notify->setPointer("accessUnit", mbuf);
237                notify->post();
238
239                msg->post();
240            }
241            break;
242        }
243
244        case kWhatPause:
245        {
246            mPaused = true;
247            break;
248        }
249
250        case kWhatResume:
251        {
252            mPaused = false;
253            break;
254        }
255
256        default:
257            TRESPASS();
258    }
259}
260
261// static
262sp<MediaCodecSource> MediaCodecSource::Create(
263        const sp<ALooper> &looper,
264        const sp<AMessage> &format,
265        const sp<MediaSource> &source,
266        const sp<IGraphicBufferConsumer> &consumer,
267        uint32_t flags) {
268    sp<MediaCodecSource> mediaSource =
269            new MediaCodecSource(looper, format, source, consumer, flags);
270
271    if (mediaSource->init() == OK) {
272        return mediaSource;
273    }
274    return NULL;
275}
276
277void MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
278    sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
279    msg->setInt64("time-offset-us", timeOffsetUs);
280    postSynchronouslyAndReturnError(msg);
281}
282
283status_t MediaCodecSource::start(MetaData* params) {
284    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
285    msg->setObject("meta", params);
286    return postSynchronouslyAndReturnError(msg);
287}
288
// MediaSource interface: stop encoding. Blocks until the encoder has
// reached EOS (the kWhatStop handler replies only once signalEOS() runs),
// then stops the puller from this thread.
status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    status_t err = postSynchronouslyAndReturnError(msg);

    // mPuller->stop() needs to be done outside MediaCodecSource's looper,
    // as it contains a synchronous call to stop the underlying MediaSource,
    // which often waits for all outstanding MediaBuffers to return, but
    // MediaBuffers are only returned when MediaCodecSource looper gets
    // to process them.

    if (mPuller != NULL) {
        ALOGI("puller (%s) stopping", mIsVideo ? "video" : "audio");
        mPuller->stop();
        ALOGI("puller (%s) stopped", mIsVideo ? "video" : "audio");
    }

    return err;
}
307
308status_t MediaCodecSource::pause() {
309    (new AMessage(kWhatPause, mReflector))->post();
310    return OK;
311}
312
// Returns the producer side of the encoder's input surface, created in
// initEncoder(). Only valid in surface-input mode.
sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}
317
318status_t MediaCodecSource::read(
319        MediaBuffer** buffer, const ReadOptions* /* options */) {
320    Mutex::Autolock autolock(mOutputBufferLock);
321
322    *buffer = NULL;
323    while (mOutputBufferQueue.size() == 0 && !mEncoderReachedEOS) {
324        mOutputBufferCond.wait(mOutputBufferLock);
325    }
326    if (!mEncoderReachedEOS) {
327        *buffer = *mOutputBufferQueue.begin();
328        mOutputBufferQueue.erase(mOutputBufferQueue.begin());
329        return OK;
330    }
331    return mErrorCode;
332}
333
// MediaBufferObserver callback: the consumer is done with a buffer we
// handed out in read(); drop the observer link and our reference.
void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}
338
// Constructor only records configuration; the encoder itself is created in
// init()/initEncoder(), which the Create() factory invokes.
MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<IGraphicBufferConsumer> &consumer,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mSetEncoderFormat(false),
      mEncoderFormat(0),
      mEncoderDataSpace(0),
      mGraphicBufferConsumer(consumer),
      mInputBufferTimeOffsetUs(0),
      mFirstSampleTimeUs(-1ll),
      mEncoderReachedEOS(false),
      mErrorCode(OK) {
    CHECK(mLooper != NULL);

    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    // Classify as video from the output MIME prefix.
    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    // With surface input the codec pulls frames itself; a Puller is only
    // needed when buffers must be read from a MediaSource.
    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}
374
// Releases the encoder (no-op if already released), stops the codec's
// private looper, and detaches our handler from the caller's looper.
MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}
381
382status_t MediaCodecSource::init() {
383    status_t err = initEncoder();
384
385    if (err != OK) {
386        releaseEncoder();
387    }
388
389    return err;
390}
391
// Creates, configures and starts the MediaCodec encoder. On success
// mEncoder is running, mOutputFormat/mMeta reflect the codec's actual
// output format, and (in surface-input mode) the input surface is wired up.
// Errors propagate to init(), which then calls releaseEncoder().
status_t MediaCodecSource::initEncoder() {
    // Route AHandler callbacks back into this object via the shared looper.
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    // The codec gets its own looper so its callbacks don't contend with our
    // message handling.
    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_METADATA_INPUT) {
        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // Start with input suspended; frames flow once onStart() resumes.
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    mEncoder = MediaCodec::CreateByType(
            mCodecLooper, outputMIME.c_str(), true /* encoder */);

    if (mEncoder == NULL) {
        return NO_INIT;
    }

    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

    // Codec events (input/output available, errors) arrive as
    // kWhatEncoderActivity messages on our looper.
    mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
    mEncoder->setCallback(mEncoderActivityNotify);

    status_t err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);

    if (err != OK) {
        return err;
    }

    // Replace the requested format with what the codec actually produces,
    // and mirror it into MetaData form for getFormat() consumers.
    mEncoder->getOutputFormat(&mOutputFormat);
    convertMessageToMetaData(mOutputFormat, mMeta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mGraphicBufferConsumer != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(
                    new PersistentSurface(NULL, mGraphicBufferConsumer));
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK
            && inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
            && usingSwReadOften) {
        // this is a SW encoder; signal source to allocate SW readable buffers
        mSetEncoderFormat = true;
        mEncoderFormat = kDefaultSwVideoEncoderFormat;
        mEncoderDataSpace = kDefaultSwVideoEncoderDataSpace;
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    mEncoderReachedEOS = false;
    mErrorCode = OK;

    return OK;
}
477
478void MediaCodecSource::releaseEncoder() {
479    if (mEncoder == NULL) {
480        return;
481    }
482
483    mEncoder->release();
484    mEncoder.clear();
485
486    while (!mInputBufferQueue.empty()) {
487        MediaBuffer *mbuf = *mInputBufferQueue.begin();
488        mInputBufferQueue.erase(mInputBufferQueue.begin());
489        if (mbuf != NULL) {
490            mbuf->release();
491        }
492    }
493}
494
495status_t MediaCodecSource::postSynchronouslyAndReturnError(
496        const sp<AMessage> &msg) {
497    sp<AMessage> response;
498    status_t err = msg->postAndAwaitResponse(&response);
499
500    if (err != OK) {
501        return err;
502    }
503
504    if (!response->findInt32("err", &err)) {
505        err = OK;
506    }
507
508    return err;
509}
510
// Terminates the encoded stream with status |err| (presumably defaulted to
// ERROR_END_OF_STREAM in the header — confirm against MediaCodecSource.h):
// drains and releases all unread output buffers, wakes any reader blocked
// in read() (it will return mErrorCode), releases the encoder, and — if a
// stop() is in flight — replies to every queued stop request.
void MediaCodecSource::signalEOS(status_t err) {
    if (!mEncoderReachedEOS) {
        ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
        {
            Mutex::Autolock autoLock(mOutputBufferLock);
            // release all unread media buffers
            for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
                    it != mOutputBufferQueue.end(); it++) {
                (*it)->release();
            }
            mOutputBufferQueue.clear();
            mEncoderReachedEOS = true;
            mErrorCode = err;
            // Wake a reader blocked in read().
            mOutputBufferCond.signal();
        }

        releaseEncoder();
    }
    if (mStopping && mEncoderReachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<sp<AReplyToken>>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
    }
}
541
542void MediaCodecSource::suspend() {
543    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
544    if (mEncoder != NULL) {
545        sp<AMessage> params = new AMessage;
546        params->setInt32("drop-input-frames", true);
547        mEncoder->setParameters(params);
548    }
549}
550
551void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
552    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
553    if (mEncoder != NULL) {
554        sp<AMessage> params = new AMessage;
555        params->setInt32("drop-input-frames", false);
556        if (skipFramesBeforeUs > 0) {
557            params->setInt64("skip-frames-before", skipFramesBeforeUs);
558        }
559        mEncoder->setParameters(params);
560    }
561}
562
// Pairs queued input MediaBuffers with available codec input-buffer
// indices and submits them. A NULL queue entry is the EOS marker and is
// queued with BUFFER_FLAG_EOS. For video the MediaBuffer's ownership moves
// to the codec buffer (released when the codec is done); for audio the
// data is copied and the buffer released immediately.
status_t MediaCodecSource::feedEncoderInputBuffers() {
    while (!mInputBufferQueue.empty()
            && !mAvailEncoderInputIndices.empty()) {
        MediaBuffer* mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
            // Apply the caller-configured timestamp offset.
            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<ABuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                // Codec input is unusable; treat as end of stream.
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            // NULL entry == EOS marker from the puller.
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
631
// Handles kWhatStart on our looper. A second start() while running acts as
// a resume; starting while stopping is rejected. In pull mode the software
// encoder's preferred pixel format/dataspace is injected into the source's
// start metadata before the puller is started.
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        // Un-suspend the input surface; frames before startTimeUs (if any)
        // are skipped.
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            if (meta == NULL) {
                meta = new MetaData;
            }
            // SW encoder: ask the source for CPU-readable buffers.
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}
682
// Main message handler, running on the caller-supplied looper. Routes
// puller buffers into the encoder, encoder output into the read() queue,
// and services start/stop/pause/time-offset requests.
void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
    // A buffer (or NULL == EOS) arrived from the Puller.
    case kWhatPullerNotify:
    {
        MediaBuffer *mbuf;
        CHECK(msg->findPointer("accessUnit", (void**)&mbuf));

        if (mbuf == NULL) {
            ALOGV("puller (%s) reached EOS",
                    mIsVideo ? "video" : "audio");
            signalEOS();
        }

        if (mEncoder == NULL) {
            // Encoder already torn down (e.g. after stop); drop the buffer.
            ALOGV("got msg '%s' after encoder shutdown.",
                  msg->debugString().c_str());

            if (mbuf != NULL) {
                mbuf->release();
            }

            break;
        }

        // Queue (NULL entries act as EOS markers) and try to feed the codec.
        mInputBufferQueue.push_back(mbuf);

        feedEncoderInputBuffers();

        break;
    }
    // Codec callback: input slot free, output ready, or error.
    case kWhatEncoderActivity:
    {
        if (mEncoder == NULL) {
            break;
        }

        int32_t cbID;
        CHECK(msg->findInt32("callbackID", &cbID));
        if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
            int32_t index;
            CHECK(msg->findInt32("index", &index));

            mAvailEncoderInputIndices.push_back(index);
            feedEncoderInputBuffers();
        } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
            int32_t index;
            size_t offset;
            size_t size;
            int64_t timeUs;
            int32_t flags;

            CHECK(msg->findInt32("index", &index));
            CHECK(msg->findSize("offset", &offset));
            CHECK(msg->findSize("size", &size));
            CHECK(msg->findInt64("timeUs", &timeUs));
            CHECK(msg->findInt32("flags", &flags));

            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                mEncoder->releaseOutputBuffer(index);
                signalEOS();
                break;
            }

            sp<ABuffer> outbuf;
            status_t err = mEncoder->getOutputBuffer(index, &outbuf);
            if (err != OK || outbuf == NULL) {
                signalEOS();
                break;
            }

            // Copy codec output into a MediaBuffer for the read() queue.
            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());

            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                if (mIsVideo) {
                    int64_t decodingTimeUs;
                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
                        // Time offset is not applied at
                        // feedEncoderInputBuffer() in surface input case.
                        timeUs += mInputBufferTimeOffsetUs;
                        // GraphicBufferSource is supposed to discard samples
                        // queued before start, and offset timeUs by start time
                        CHECK_GE(timeUs, 0ll);
                        // TODO:
                        // Decoding time for surface source is unavailable,
                        // use presentation time for now. May need to move
                        // this logic into MediaCodec.
                        decodingTimeUs = timeUs;
                    } else {
                        // Output order == decode order, so the front of the
                        // queue pushed in feedEncoderInputBuffers() matches.
                        CHECK(!mDecodingTimeQueue.empty());
                        decodingTimeUs = *(mDecodingTimeQueue.begin());
                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                    }
                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);

                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                } else {
                    int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                    CHECK(!mDriftTimeQueue.empty());
                    driftTimeUs = *(mDriftTimeQueue.begin());
                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                            timeUs, timeUs / 1E6, driftTimeUs);
                }
                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
            } else {
                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
            }
            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
            }
            // Buffer is returned to us via signalBufferReturned().
            mbuf->setObserver(this);
            mbuf->add_ref();

            {
                Mutex::Autolock autoLock(mOutputBufferLock);
                mOutputBufferQueue.push_back(mbuf);
                // Wake a reader blocked in read().
                mOutputBufferCond.signal();
            }

            mEncoder->releaseOutputBuffer(index);
       } else if (cbID == MediaCodec::CB_ERROR) {
            status_t err;
            CHECK(msg->findInt32("err", &err));
            ALOGE("Encoder (%s) reported error : 0x%x",
                    mIsVideo ? "video" : "audio", err);
            // Terminate the stream with the codec's error code.
            signalEOS(err);
       }
       break;
    }
    case kWhatStart:
    {
        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        sp<RefBase> obj;
        CHECK(msg->findObject("meta", &obj));
        MetaData *params = static_cast<MetaData *>(obj.get());

        sp<AMessage> response = new AMessage;
        response->setInt32("err", onStart(params));
        response->postReply(replyID);
        break;
    }
    case kWhatStop:
    {
        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        if (mEncoderReachedEOS) {
            // if we already reached EOS, reply and return now
            ALOGI("encoder (%s) already stopped",
                    mIsVideo ? "video" : "audio");
            (new AMessage)->postReply(replyID);
            break;
        }

        // Reply is deferred until signalEOS() runs; multiple concurrent
        // stop() callers all get answered from mStopReplyIDQueue.
        mStopReplyIDQueue.push_back(replyID);
        if (mStopping) {
            // nothing to do if we're already stopping, reply will be posted
            // to all when we're stopped.
            break;
        }

        mStopping = true;

        // if using surface, signal source EOS and wait for EOS to come back.
        // otherwise, release encoder and post EOS if haven't done already
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            mEncoder->signalEndOfInputStream();
        } else {
            signalEOS();
        }
        break;
    }
    case kWhatPause:
    {
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            suspend();
        } else {
            CHECK(mPuller != NULL);
            mPuller->pause();
        }
        break;
    }
    case kWhatSetInputBufferTimeOffset:
    {
        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));

        sp<AMessage> response = new AMessage;
        response->postReply(replyID);
        break;
    }
    default:
        TRESPASS();
    }
}
889
890} // namespace android
891