// MediaCodecSource.cpp revision 6a3a56fbcd6c01c3895f14e43858971b0edca9b2
1/*
2 * Copyright 2014, The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *     http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "MediaCodecSource"
19#define DEBUG_DRIFT_TIME 0
20
21#include <inttypes.h>
22
23#include <gui/IGraphicBufferConsumer.h>
24#include <gui/IGraphicBufferProducer.h>
25#include <gui/Surface.h>
26#include <media/ICrypto.h>
27#include <media/stagefright/foundation/ABuffer.h>
28#include <media/stagefright/foundation/ADebug.h>
29#include <media/stagefright/foundation/ALooper.h>
30#include <media/stagefright/foundation/AMessage.h>
31#include <media/stagefright/MediaBuffer.h>
32#include <media/stagefright/MediaCodec.h>
33#include <media/stagefright/MediaCodecList.h>
34#include <media/stagefright/MediaCodecSource.h>
35#include <media/stagefright/MediaErrors.h>
36#include <media/stagefright/MediaSource.h>
37#include <media/stagefright/MetaData.h>
38#include <media/stagefright/PersistentSurface.h>
39#include <media/stagefright/Utils.h>
40
41namespace android {
42
// Pixel format / dataspace requested from the source when the selected
// encoder is a software codec needing CPU-readable input buffers.
const int kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
const int kDefaultSwVideoEncoderDataSpace = HAL_DATASPACE_BT709;

const int kStopTimeoutUs = 300000; // allow 300 ms for shutting down encoder
47
// Puller pulls buffers from a MediaSource on its own dedicated looper thread
// and queues them for consumption by the encoder. All calls into |mSource|
// happen on that thread; the public methods either post messages to it or
// flip flags under the queue lock.
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    // Starts the looper and the source, then schedules pulling.
    // Blocks until the source's start() result is available.
    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    // Marks pulling stopped and flushes queued buffers; also interrupts a
    // source read that has been pending for over a second.
    void stop();
    // Asynchronously invokes mSource->stop() on the looper thread.
    void stopSource();
    void pause();
    void resume();

    // Pops the oldest pulled buffer, if any; see Queue::readBuffer below.
    bool readBuffer(MediaBuffer **buffer);

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;   // template message posted when a buffer/EOS arrives
    sp<ALooper> mLooper;
    bool mIsAudio;          // true for "audio/" MIME; used for logging

    // State shared between client threads and the looper thread;
    // always accessed through the Mutexed<> wrapper below.
    struct Queue {
        Queue()
            : mReadPendingSince(0),
              mPaused(false),
              mPulling(false) { }
        int64_t mReadPendingSince;  // when a blocking source read began (0 = none)
        bool mPaused;               // while true, pulled buffers are dropped
        bool mPulling;              // cleared by stop() to end the pull loop
        Vector<MediaBuffer *> mReadBuffers;

        // release all queued buffers
        void flush();
        // if queue is empty, return false and set *|buffer| to NULL . Otherwise, pop
        // buffer from front of the queue, place it into *|buffer| and return true.
        bool readBuffer(MediaBuffer **buffer);
        // add a buffer to the back of the queue
        void pushBuffer(MediaBuffer *mbuf);
    };
    Mutexed<Queue> mQueue;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};
100
101MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
102    : mSource(source),
103      mLooper(new ALooper()),
104      mIsAudio(false)
105{
106    sp<MetaData> meta = source->getFormat();
107    const char *mime;
108    CHECK(meta->findCString(kKeyMIMEType, &mime));
109
110    mIsAudio = !strncasecmp(mime, "audio/", 6);
111
112    mLooper->setName("pull_looper");
113}
114
MediaCodecSource::Puller::~Puller() {
    // Detach from the looper before stopping it so no further messages are
    // delivered to this (now dying) handler.
    mLooper->unregisterHandler(id());
    mLooper->stop();
}
119
120void MediaCodecSource::Puller::Queue::pushBuffer(MediaBuffer *mbuf) {
121    mReadBuffers.push_back(mbuf);
122}
123
124bool MediaCodecSource::Puller::Queue::readBuffer(MediaBuffer **mbuf) {
125    if (mReadBuffers.empty()) {
126        *mbuf = NULL;
127        return false;
128    }
129    *mbuf = *mReadBuffers.begin();
130    mReadBuffers.erase(mReadBuffers.begin());
131    return true;
132}
133
134void MediaCodecSource::Puller::Queue::flush() {
135    MediaBuffer *mbuf;
136    while (readBuffer(&mbuf)) {
137        // there are no null buffers in the queue
138        mbuf->release();
139    }
140}
141
142bool MediaCodecSource::Puller::readBuffer(MediaBuffer **mbuf) {
143    Mutexed<Queue>::Locked queue(mQueue);
144    return queue->readBuffer(mbuf);
145}
146
147status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
148        const sp<AMessage> &msg) {
149    sp<AMessage> response;
150    status_t err = msg->postAndAwaitResponse(&response);
151
152    if (err != OK) {
153        return err;
154    }
155
156    if (!response->findInt32("err", &err)) {
157        err = OK;
158    }
159
160    return err;
161}
162
163status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
164    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
165    mLooper->start(
166            false /* runOnCallingThread */,
167            false /* canCallJava */,
168            PRIORITY_AUDIO);
169    mLooper->registerHandler(this);
170    mNotify = notify;
171
172    sp<AMessage> msg = new AMessage(kWhatStart, this);
173    msg->setObject("meta", meta);
174    return postSynchronouslyAndReturnError(msg);
175}
176
void MediaCodecSource::Puller::stop() {
    bool interrupt = false;
    {
        // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
        // stop.
        Mutexed<Queue>::Locked queue(mQueue);
        queue->mPulling = false;
        // A source read outstanding for more than one second is treated as
        // stalled and gets interrupted below.
        interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
        queue->flush(); // flush any unprocessed pulled buffers
    }

    if (interrupt) {
        // call source->stop if read has been pending for over a second
        // TODO: we should really call this if kWhatStop has not returned for more than a second.
        mSource->stop();
    }
}
194
195void MediaCodecSource::Puller::stopSource() {
196    (new AMessage(kWhatStop, this))->post();
197}
198
199void MediaCodecSource::Puller::pause() {
200    Mutexed<Queue>::Locked queue(mQueue);
201    queue->mPaused = true;
202}
203
204void MediaCodecSource::Puller::resume() {
205    Mutexed<Queue>::Locked queue(mQueue);
206    queue->mPaused = false;
207}
208
209void MediaCodecSource::Puller::schedulePull() {
210    (new AMessage(kWhatPull, this))->post();
211}
212
213void MediaCodecSource::Puller::handleEOS() {
214    ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
215    sp<AMessage> msg = mNotify->dup();
216    msg->setInt32("eos", 1);
217    msg->post();
218}
219
// Looper-thread message handler: starts/stops the source and runs the
// pull loop (kWhatPull re-posts itself while data keeps flowing).
void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            {
                // enable the pull loop before starting the source
                Mutexed<Queue>::Locked queue(mQueue);
                queue->mPulling = true;
            }

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            // reply to the thread blocked in start()
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            mSource->stop();
            break;
        }

        case kWhatPull:
        {
            Mutexed<Queue>::Locked queue(mQueue);
            // record when the (potentially blocking) read begins, so stop()
            // can detect a stalled source
            queue->mReadPendingSince = ALooper::GetNowUs();
            if (!queue->mPulling) {
                handleEOS();
                break;
            }

            // drop the lock across the blocking read so stop()/pause() are
            // not blocked behind it
            queue.unlock();
            MediaBuffer *mbuf = NULL;
            status_t err = mSource->read(&mbuf);
            queue.lock();

            queue->mReadPendingSince = 0;
            // if we need to discard buffer
            if (!queue->mPulling || queue->mPaused || err != OK) {
                if (mbuf != NULL) {
                    mbuf->release();
                    mbuf = NULL;
                }
                if (queue->mPulling && err == OK) {
                    msg->post(); // if simply paused, keep pulling source
                } else if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else if (err != OK) {
                    ALOGE("error %d reading stream.", err);
                }
            }

            if (mbuf != NULL) {
                queue->pushBuffer(mbuf);
            }

            queue.unlock();

            if (mbuf != NULL) {
                // tell the owner a buffer is ready, and keep pulling
                mNotify->post();
                msg->post();
            } else {
                handleEOS();
            }
            break;
        }

        default:
            TRESPASS();
    }
}
302
// Output starts in the "not yet at EOS" state; mErrorCode is the status
// read() returns once the encoder reaches EOS.
MediaCodecSource::Output::Output()
    : mEncoderReachedEOS(false),
      mErrorCode(OK) {
}
307
308// static
309sp<MediaCodecSource> MediaCodecSource::Create(
310        const sp<ALooper> &looper,
311        const sp<AMessage> &format,
312        const sp<MediaSource> &source,
313        const sp<IGraphicBufferConsumer> &consumer,
314        uint32_t flags) {
315    sp<MediaCodecSource> mediaSource =
316            new MediaCodecSource(looper, format, source, consumer, flags);
317
318    if (mediaSource->init() == OK) {
319        return mediaSource;
320    }
321    return NULL;
322}
323
324void MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
325    sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
326    msg->setInt64("time-offset-us", timeOffsetUs);
327    postSynchronouslyAndReturnError(msg);
328}
329
330status_t MediaCodecSource::start(MetaData* params) {
331    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
332    msg->setObject("meta", params);
333    return postSynchronouslyAndReturnError(msg);
334}
335
336status_t MediaCodecSource::stop() {
337    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
338    return postSynchronouslyAndReturnError(msg);
339}
340
341status_t MediaCodecSource::pause() {
342    (new AMessage(kWhatPause, mReflector))->post();
343    return OK;
344}
345
346sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
347    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
348    return mGraphicBufferProducer;
349}
350
// Blocking read of one encoded buffer. The caller owns *|buffer| and must
// release it (which routes through signalBufferReturned()).
status_t MediaCodecSource::read(
        MediaBuffer** buffer, const ReadOptions* /* options */) {
    Mutexed<Output>::Locked output(mOutput);

    *buffer = NULL;
    // Wait until an encoded buffer is queued or the encoder reaches EOS;
    // signalEOS() / the output callback signal mCond.
    while (output->mBufferQueue.size() == 0 && !output->mEncoderReachedEOS) {
        output.waitForCondition(output->mCond);
    }
    if (!output->mEncoderReachedEOS) {
        *buffer = *output->mBufferQueue.begin();
        output->mBufferQueue.erase(output->mBufferQueue.begin());
        return OK;
    }
    // At EOS, report the terminal status recorded by signalEOS().
    return output->mErrorCode;
}
366
367void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
368    buffer->setObserver(0);
369    buffer->release();
370}
371
372MediaCodecSource::MediaCodecSource(
373        const sp<ALooper> &looper,
374        const sp<AMessage> &outputFormat,
375        const sp<MediaSource> &source,
376        const sp<IGraphicBufferConsumer> &consumer,
377        uint32_t flags)
378    : mLooper(looper),
379      mOutputFormat(outputFormat),
380      mMeta(new MetaData),
381      mFlags(flags),
382      mIsVideo(false),
383      mStarted(false),
384      mStopping(false),
385      mDoMoreWorkPending(false),
386      mSetEncoderFormat(false),
387      mEncoderFormat(0),
388      mEncoderDataSpace(0),
389      mGraphicBufferConsumer(consumer),
390      mInputBufferTimeOffsetUs(0),
391      mFirstSampleTimeUs(-1ll),
392      mGeneration(0) {
393    CHECK(mLooper != NULL);
394
395    AString mime;
396    CHECK(mOutputFormat->findString("mime", &mime));
397
398    if (!strncasecmp("video/", mime.c_str(), 6)) {
399        mIsVideo = true;
400    }
401
402    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
403        mPuller = new Puller(source);
404    }
405}
406
MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    // mCodecLooper and mReflector are created unconditionally at the top of
    // initEncoder(), which init() always invokes — so for objects obtained
    // via Create() these are non-NULL here.
    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}
413
414status_t MediaCodecSource::init() {
415    status_t err = initEncoder();
416
417    if (err != OK) {
418        releaseEncoder();
419    }
420
421    return err;
422}
423
// Creates, configures, and starts the MediaCodec encoder:
//  1. register our handler and spin up the codec's own looper;
//  2. try each matching encoder component until one configures;
//  3. set up surface input (persistent or newly created) if requested;
//  4. detect software encoders and remember the pixel format / dataspace to
//     request from the source;
//  5. start the codec and reset the output EOS state.
status_t MediaCodecSource::initEncoder() {
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_METADATA_INPUT) {
        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // start with input suspended; onStart()/resume() lifts it
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    Vector<AString> matchingCodecs;
    MediaCodecList::findMatchingCodecs(
            outputMIME.c_str(), true /* encoder */,
            ((mFlags & FLAG_PREFER_SOFTWARE_CODEC) ? MediaCodecList::kPreferSoftwareCodecs : 0),
            &matchingCodecs);

    status_t err = NO_INIT;
    for (size_t ix = 0; ix < matchingCodecs.size(); ++ix) {
        mEncoder = MediaCodec::CreateByComponentName(
                mCodecLooper, matchingCodecs[ix]);

        if (mEncoder == NULL) {
            continue;
        }

        ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

        // asynchronous (callback) mode; events arrive as kWhatEncoderActivity
        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
        mEncoder->setCallback(mEncoderActivityNotify);

        err = mEncoder->configure(
                    mOutputFormat,
                    NULL /* nativeWindow */,
                    NULL /* crypto */,
                    MediaCodec::CONFIGURE_FLAG_ENCODE);

        if (err == OK) {
            break;
        }
        // this component failed to configure; release it and try the next
        mEncoder->release();
        mEncoder = NULL;
    }

    if (err != OK) {
        return err;
    }

    mEncoder->getOutputFormat(&mOutputFormat);
    convertMessageToMetaData(mOutputFormat, mMeta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mGraphicBufferConsumer != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(
                    new PersistentSurface(NULL, mGraphicBufferConsumer));
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK
            && inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
            && usingSwReadOften) {
        // this is a SW encoder; signal source to allocate SW readable buffers
        mSetEncoderFormat = true;
        mEncoderFormat = kDefaultSwVideoEncoderFormat;
        mEncoderDataSpace = kDefaultSwVideoEncoderDataSpace;
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    {
        // reset output-side EOS bookkeeping for this (re)initialization
        Mutexed<Output>::Locked output(mOutput);
        output->mEncoderReachedEOS = false;
        output->mErrorCode = OK;
    }

    return OK;
}
527
528void MediaCodecSource::releaseEncoder() {
529    if (mEncoder == NULL) {
530        return;
531    }
532
533    mEncoder->release();
534    mEncoder.clear();
535}
536
537status_t MediaCodecSource::postSynchronouslyAndReturnError(
538        const sp<AMessage> &msg) {
539    sp<AMessage> response;
540    status_t err = msg->postAndAwaitResponse(&response);
541
542    if (err != OK) {
543        return err;
544    }
545
546    if (!response->findInt32("err", &err)) {
547        err = OK;
548    }
549
550    return err;
551}
552
553void MediaCodecSource::signalEOS(status_t err) {
554    bool reachedEOS = false;
555    {
556        Mutexed<Output>::Locked output(mOutput);
557        reachedEOS = output->mEncoderReachedEOS;
558        if (!reachedEOS) {
559            ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
560            // release all unread media buffers
561            for (List<MediaBuffer*>::iterator it = output->mBufferQueue.begin();
562                    it != output->mBufferQueue.end(); it++) {
563                (*it)->release();
564            }
565            output->mBufferQueue.clear();
566            output->mEncoderReachedEOS = true;
567            output->mErrorCode = err;
568            output->mCond.signal();
569
570            reachedEOS = true;
571            output.unlock();
572            releaseEncoder();
573        }
574    }
575
576    if (mStopping && reachedEOS) {
577        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
578        mPuller->stopSource();
579        ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
580        // posting reply to everyone that's waiting
581        List<sp<AReplyToken>>::iterator it;
582        for (it = mStopReplyIDQueue.begin();
583                it != mStopReplyIDQueue.end(); it++) {
584            (new AMessage)->postReply(*it);
585        }
586        mStopReplyIDQueue.clear();
587        mStopping = false;
588        ++mGeneration;
589    }
590}
591
592void MediaCodecSource::suspend() {
593    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
594    if (mEncoder != NULL) {
595        sp<AMessage> params = new AMessage;
596        params->setInt32("drop-input-frames", true);
597        mEncoder->setParameters(params);
598    }
599}
600
601void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
602    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
603    if (mEncoder != NULL) {
604        sp<AMessage> params = new AMessage;
605        params->setInt32("drop-input-frames", false);
606        if (skipFramesBeforeUs > 0) {
607            params->setInt64("skip-frames-before", skipFramesBeforeUs);
608        }
609        mEncoder->setParameters(params);
610    }
611}
612
// Moves buffers pulled from the source into the encoder's free input slots.
// Runs on the handler thread. A NULL buffer from the puller signals EOS and
// is forwarded to the encoder via BUFFER_FLAG_EOS.
status_t MediaCodecSource::feedEncoderInputBuffers() {
    MediaBuffer* mbuf = NULL;
    while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<ABuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                // encoder no longer usable; release the pulled buffer and
                // wind the pipeline down
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
678
// Handles kWhatStart on the handler thread. The first start() begins
// encoding; a start() issued while already started merely resumes after a
// pause. Returns INVALID_OPERATION while a stop is in flight.
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        // lifts the suspended input and skips frames before |startTimeUs|
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            // SW encoder: ask the source for CPU-readable buffers
            if (meta == NULL) {
                meta = new MetaData;
            }
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}
729
730void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
731    switch (msg->what()) {
732    case kWhatPullerNotify:
733    {
734        int32_t eos = 0;
735        if (msg->findInt32("eos", &eos) && eos) {
736            ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
737            signalEOS();
738            break;
739        }
740
741        if (mEncoder == NULL) {
742            ALOGV("got msg '%s' after encoder shutdown.", msg->debugString().c_str());
743            break;
744        }
745
746        feedEncoderInputBuffers();
747        break;
748    }
749    case kWhatEncoderActivity:
750    {
751        if (mEncoder == NULL) {
752            break;
753        }
754
755        int32_t cbID;
756        CHECK(msg->findInt32("callbackID", &cbID));
757        if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
758            int32_t index;
759            CHECK(msg->findInt32("index", &index));
760
761            mAvailEncoderInputIndices.push_back(index);
762            feedEncoderInputBuffers();
763        } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
764            int32_t index;
765            size_t offset;
766            size_t size;
767            int64_t timeUs;
768            int32_t flags;
769
770            CHECK(msg->findInt32("index", &index));
771            CHECK(msg->findSize("offset", &offset));
772            CHECK(msg->findSize("size", &size));
773            CHECK(msg->findInt64("timeUs", &timeUs));
774            CHECK(msg->findInt32("flags", &flags));
775
776            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
777                mEncoder->releaseOutputBuffer(index);
778                signalEOS();
779                break;
780            }
781
782            sp<ABuffer> outbuf;
783            status_t err = mEncoder->getOutputBuffer(index, &outbuf);
784            if (err != OK || outbuf == NULL) {
785                signalEOS();
786                break;
787            }
788
789            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
790            memcpy(mbuf->data(), outbuf->data(), outbuf->size());
791
792            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
793                if (mIsVideo) {
794                    int64_t decodingTimeUs;
795                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
796                        // Time offset is not applied at
797                        // feedEncoderInputBuffer() in surface input case.
798                        timeUs += mInputBufferTimeOffsetUs;
799                        // GraphicBufferSource is supposed to discard samples
800                        // queued before start, and offset timeUs by start time
801                        CHECK_GE(timeUs, 0ll);
802                        // TODO:
803                        // Decoding time for surface source is unavailable,
804                        // use presentation time for now. May need to move
805                        // this logic into MediaCodec.
806                        decodingTimeUs = timeUs;
807                    } else {
808                        CHECK(!mDecodingTimeQueue.empty());
809                        decodingTimeUs = *(mDecodingTimeQueue.begin());
810                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
811                    }
812                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
813
814                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
815                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
816                } else {
817                    int64_t driftTimeUs = 0;
818#if DEBUG_DRIFT_TIME
819                    CHECK(!mDriftTimeQueue.empty());
820                    driftTimeUs = *(mDriftTimeQueue.begin());
821                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
822                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
823#endif // DEBUG_DRIFT_TIME
824                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
825                            timeUs, timeUs / 1E6, driftTimeUs);
826                }
827                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
828            } else {
829                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
830            }
831            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
832                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
833            }
834            mbuf->setObserver(this);
835            mbuf->add_ref();
836
837            {
838                Mutexed<Output>::Locked output(mOutput);
839                output->mBufferQueue.push_back(mbuf);
840                output->mCond.signal();
841            }
842
843            mEncoder->releaseOutputBuffer(index);
844       } else if (cbID == MediaCodec::CB_ERROR) {
845            status_t err;
846            CHECK(msg->findInt32("err", &err));
847            ALOGE("Encoder (%s) reported error : 0x%x",
848                    mIsVideo ? "video" : "audio", err);
849            signalEOS();
850       }
851       break;
852    }
853    case kWhatStart:
854    {
855        sp<AReplyToken> replyID;
856        CHECK(msg->senderAwaitsResponse(&replyID));
857
858        sp<RefBase> obj;
859        CHECK(msg->findObject("meta", &obj));
860        MetaData *params = static_cast<MetaData *>(obj.get());
861
862        sp<AMessage> response = new AMessage;
863        response->setInt32("err", onStart(params));
864        response->postReply(replyID);
865        break;
866    }
867    case kWhatStop:
868    {
869        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");
870
871        sp<AReplyToken> replyID;
872        CHECK(msg->senderAwaitsResponse(&replyID));
873
874        if (mOutput.lock()->mEncoderReachedEOS) {
875            // if we already reached EOS, reply and return now
876            ALOGI("encoder (%s) already stopped",
877                    mIsVideo ? "video" : "audio");
878            (new AMessage)->postReply(replyID);
879            break;
880        }
881
882        mStopReplyIDQueue.push_back(replyID);
883        if (mStopping) {
884            // nothing to do if we're already stopping, reply will be posted
885            // to all when we're stopped.
886            break;
887        }
888
889        mStopping = true;
890
891        // if using surface, signal source EOS and wait for EOS to come back.
892        // otherwise, stop puller (which also clears the input buffer queue)
893        // and wait for the EOS message. We cannot call source->stop() because
894        // the encoder may still be processing input buffers.
895        if (mFlags & FLAG_USE_SURFACE_INPUT) {
896            mEncoder->signalEndOfInputStream();
897        } else {
898            mPuller->stop();
899        }
900
901        // complete stop even if encoder/puller stalled
902        sp<AMessage> timeoutMsg = new AMessage(kWhatStopStalled, mReflector);
903        timeoutMsg->setInt32("generation", mGeneration);
904        timeoutMsg->post(kStopTimeoutUs);
905        break;
906    }
907
908    case kWhatStopStalled:
909    {
910        int32_t generation;
911        CHECK(msg->findInt32("generation", &generation));
912        if (generation != mGeneration) {
913             break;
914        }
915
916        if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
917            ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
918            mPuller->stopSource();
919            ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
920        }
921        signalEOS();
922    }
923
924    case kWhatPause:
925    {
926        if (mFlags & FLAG_USE_SURFACE_INPUT) {
927            suspend();
928        } else {
929            CHECK(mPuller != NULL);
930            mPuller->pause();
931        }
932        break;
933    }
934    case kWhatSetInputBufferTimeOffset:
935    {
936        sp<AReplyToken> replyID;
937        CHECK(msg->senderAwaitsResponse(&replyID));
938
939        CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));
940
941        sp<AMessage> response = new AMessage;
942        response->postReply(replyID);
943        break;
944    }
945    default:
946        TRESPASS();
947    }
948}
949
950} // namespace android
951