MediaCodecSource.cpp revision a86b65e69f6a76cabab37ef480aaa86dca38b3ea
1/*
2 * Copyright 2014, The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *     http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "MediaCodecSource"
19#define DEBUG_DRIFT_TIME 0
20
21#include <inttypes.h>
22
23#include <gui/IGraphicBufferConsumer.h>
24#include <gui/IGraphicBufferProducer.h>
25#include <gui/Surface.h>
26#include <media/ICrypto.h>
27#include <media/MediaCodecBuffer.h>
28#include <media/stagefright/foundation/ABuffer.h>
29#include <media/stagefright/foundation/ADebug.h>
30#include <media/stagefright/foundation/ALooper.h>
31#include <media/stagefright/foundation/AMessage.h>
32#include <media/stagefright/MediaBuffer.h>
33#include <media/stagefright/MediaCodec.h>
34#include <media/stagefright/MediaCodecList.h>
35#include <media/stagefright/MediaCodecSource.h>
36#include <media/stagefright/MediaErrors.h>
37#include <media/stagefright/MediaSource.h>
38#include <media/stagefright/MetaData.h>
39#include <media/stagefright/PersistentSurface.h>
40#include <media/stagefright/Utils.h>
41
42namespace android {
43
// Preferred input pixel formats signalled to the video source, depending on
// whether a software or hardware encoder component was selected.
const int32_t kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
const int32_t kDefaultHwVideoEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
const int32_t kDefaultVideoEncoderDataSpace = HAL_DATASPACE_V0_BT709;

const int kStopTimeoutUs = 300000; // allow 300 ms for shutting down encoder
49
// Puller pulls buffers from a MediaSource on a dedicated looper thread
// ("pull_looper") and queues them for MediaCodecSource to feed into the
// encoder. All source access happens on that looper except interruptSource().
struct MediaCodecSource::Puller : public AHandler {
    explicit Puller(const sp<MediaSource> &source);

    // Calls mSource->stop() on the caller's thread; used to unblock a
    // read() that the looper thread is stuck in.
    void interruptSource();
    // Starts the looper, registers this handler and synchronously starts
    // the source; |notify| is posted whenever a buffer is queued or on EOS.
    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    // Stops pulling and flushes undelivered buffers; may also interrupt the
    // source if a read has been pending for over a second.
    void stop();
    // Synchronously stops the underlying source on the looper thread.
    void stopSource();
    void pause();
    void resume();

    // Pops the oldest queued buffer; returns false (and sets NULL) if empty.
    bool readBuffer(MediaBuffer **buffer);

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;   // posted to the owner on new buffer / EOS
    sp<ALooper> mLooper;
    bool mIsAudio;          // true when the source mime starts with "audio/"

    // Pull state and buffer FIFO shared between the looper thread and
    // callers; guarded by Mutexed<> below.
    struct Queue {
        Queue()
            : mReadPendingSince(0),
              mPaused(false),
              mPulling(false) { }
        int64_t mReadPendingSince;  // when a blocking read started; 0 if none pending
        bool mPaused;               // while true, pulled buffers are discarded
        bool mPulling;              // cleared once stop() has been requested
        Vector<MediaBuffer *> mReadBuffers;

        void flush();
        // if queue is empty, return false and set *|buffer| to NULL . Otherwise, pop
        // buffer from front of the queue, place it into *|buffer| and return true.
        bool readBuffer(MediaBuffer **buffer);
        // add a buffer to the back of the queue
        void pushBuffer(MediaBuffer *mbuf);
    };
    Mutexed<Queue> mQueue;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};
103
104MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
105    : mSource(source),
106      mLooper(new ALooper()),
107      mIsAudio(false)
108{
109    sp<MetaData> meta = source->getFormat();
110    const char *mime;
111    CHECK(meta->findCString(kKeyMIMEType, &mime));
112
113    mIsAudio = !strncasecmp(mime, "audio/", 6);
114
115    mLooper->setName("pull_looper");
116}
117
MediaCodecSource::Puller::~Puller() {
    // Detach from and stop the private looper before destruction.
    mLooper->unregisterHandler(id());
    mLooper->stop();
}
122
// Appends a pulled buffer; caller must hold the queue lock
// (Mutexed<Queue>::Locked).
void MediaCodecSource::Puller::Queue::pushBuffer(MediaBuffer *mbuf) {
    mReadBuffers.push_back(mbuf);
}
126
127bool MediaCodecSource::Puller::Queue::readBuffer(MediaBuffer **mbuf) {
128    if (mReadBuffers.empty()) {
129        *mbuf = NULL;
130        return false;
131    }
132    *mbuf = *mReadBuffers.begin();
133    mReadBuffers.erase(mReadBuffers.begin());
134    return true;
135}
136
137void MediaCodecSource::Puller::Queue::flush() {
138    MediaBuffer *mbuf;
139    while (readBuffer(&mbuf)) {
140        // there are no null buffers in the queue
141        mbuf->release();
142    }
143}
144
// Thread-safe wrapper around Queue::readBuffer().
bool MediaCodecSource::Puller::readBuffer(MediaBuffer **mbuf) {
    Mutexed<Queue>::Locked queue(mQueue);
    return queue->readBuffer(mbuf);
}
149
150status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
151        const sp<AMessage> &msg) {
152    sp<AMessage> response;
153    status_t err = msg->postAndAwaitResponse(&response);
154
155    if (err != OK) {
156        return err;
157    }
158
159    if (!response->findInt32("err", &err)) {
160        err = OK;
161    }
162
163    return err;
164}
165
166status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
167    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
168    mLooper->start(
169            false /* runOnCallingThread */,
170            false /* canCallJava */,
171            PRIORITY_AUDIO);
172    mLooper->registerHandler(this);
173    mNotify = notify;
174
175    sp<AMessage> msg = new AMessage(kWhatStart, this);
176    msg->setObject("meta", meta);
177    return postSynchronouslyAndReturnError(msg);
178}
179
180void MediaCodecSource::Puller::stop() {
181    bool interrupt = false;
182    {
183        // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
184        // stop.
185        Mutexed<Queue>::Locked queue(mQueue);
186        queue->mPulling = false;
187        interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
188        queue->flush(); // flush any unprocessed pulled buffers
189    }
190
191    if (interrupt) {
192        interruptSource();
193    }
194}
195
// Unblocks a stalled mSource->read() by stopping the source from the
// caller's thread (the puller looper itself may be blocked in the read).
void MediaCodecSource::Puller::interruptSource() {
    // call source->stop if read has been pending for over a second
    // We have to call this outside the looper as looper is pending on the read.
    mSource->stop();
}
201
// Synchronously runs mSource->stop() on the puller looper (kWhatStop).
void MediaCodecSource::Puller::stopSource() {
    sp<AMessage> msg = new AMessage(kWhatStop, this);
    (void)postSynchronouslyAndReturnError(msg);
}
206
// While paused, the pull loop keeps reading but discards the buffers
// (see kWhatPull in onMessageReceived).
void MediaCodecSource::Puller::pause() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPaused = true;
}
211
// Resumes delivery of pulled buffers.
void MediaCodecSource::Puller::resume() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPaused = false;
}
216
// Queues the next read on the puller looper.
void MediaCodecSource::Puller::schedulePull() {
    (new AMessage(kWhatPull, this))->post();
}
220
221void MediaCodecSource::Puller::handleEOS() {
222    ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
223    sp<AMessage> msg = mNotify->dup();
224    msg->setInt32("eos", 1);
225    msg->post();
226}
227
void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            {
                // Mark pulling before the source starts so kWhatPull keeps
                // rescheduling itself.
                Mutexed<Queue>::Locked queue(mQueue);
                queue->mPulling = true;
            }

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            // Reply to the thread blocked in start().
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            mSource->stop();

            // Reply to the thread blocked in stopSource().
            sp<AMessage> response = new AMessage;
            response->setInt32("err", OK);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatPull:
        {
            Mutexed<Queue>::Locked queue(mQueue);
            // Record when the (potentially blocking) read starts so stop()
            // can detect a stalled source.
            queue->mReadPendingSince = ALooper::GetNowUs();
            if (!queue->mPulling) {
                handleEOS();
                break;
            }

            // Drop the lock across the blocking read so stop()/pause() can
            // still update queue state meanwhile.
            queue.unlock();
            MediaBuffer *mbuf = NULL;
            status_t err = mSource->read(&mbuf);
            queue.lock();

            queue->mReadPendingSince = 0;
            // if we need to discard buffer
            if (!queue->mPulling || queue->mPaused || err != OK) {
                if (mbuf != NULL) {
                    mbuf->release();
                    mbuf = NULL;
                }
                if (queue->mPulling && err == OK) {
                    msg->post(); // if simply paused, keep pulling source
                    break;
                } else if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else if (err != OK) {
                    ALOGE("error %d reading stream.", err);
                }
            }

            if (mbuf != NULL) {
                queue->pushBuffer(mbuf);
            }

            queue.unlock();

            // Notify the owner outside the lock, then reschedule ourselves;
            // a NULL buffer here means the stream ended or errored out.
            if (mbuf != NULL) {
                mNotify->post();
                msg->post();
            } else {
                handleEOS();
            }
            break;
        }

        default:
            TRESPASS();
    }
}
318
// Output starts in the "not at EOS, no error" state; initEncoder() resets
// these fields again before each encoder start.
MediaCodecSource::Output::Output()
    : mEncoderReachedEOS(false),
      mErrorCode(OK) {
}
323
324// static
325sp<MediaCodecSource> MediaCodecSource::Create(
326        const sp<ALooper> &looper,
327        const sp<AMessage> &format,
328        const sp<MediaSource> &source,
329        const sp<IGraphicBufferConsumer> &consumer,
330        uint32_t flags) {
331    sp<MediaCodecSource> mediaSource =
332            new MediaCodecSource(looper, format, source, consumer, flags);
333
334    if (mediaSource->init() == OK) {
335        return mediaSource;
336    }
337    return NULL;
338}
339
// Sets an offset applied to subsequent input buffer timestamps; handled
// on the looper (kWhatSetInputBufferTimeOffset) and, in surface-input
// mode, propagated to the codec via setParameters.
status_t MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
    sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
    msg->setInt64("time-offset-us", timeOffsetUs);
    return postSynchronouslyAndReturnError(msg);
}
345
346int64_t MediaCodecSource::getFirstSampleSystemTimeUs() {
347    sp<AMessage> msg = new AMessage(kWhatGetFirstSampleSystemTimeUs, mReflector);
348    sp<AMessage> response;
349    msg->postAndAwaitResponse(&response);
350    int64_t timeUs;
351    if (!response->findInt64("time-us", &timeUs)) {
352        timeUs = -1ll;
353    }
354    return timeUs;
355}
356
// Starts (or resumes) encoding; runs onStart() synchronously on the looper.
status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
    msg->setObject("meta", params);
    return postSynchronouslyAndReturnError(msg);
}
362
// Synchronously stops encoding; blocks until EOS propagates or the
// kWhatStopStalled timeout forces completion.
status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    return postSynchronouslyAndReturnError(msg);
}
367
// Requests a pause asynchronously; takes effect on the looper thread
// (kWhatPause), possibly deferred until the first sample is seen.
status_t MediaCodecSource::pause() {
    (new AMessage(kWhatPause, mReflector))->post();
    return OK;
}
372
// Returns the cached output format (kept in sync with the encoder's
// output format by initEncoder() and CB_OUTPUT_FORMAT_CHANGED).
sp<MetaData> MediaCodecSource::getFormat() {
    Mutexed<sp<MetaData>>::Locked meta(mMeta);
    return *meta;
}
377
// Producer end of the encoder's input surface; only valid in
// surface-input mode.
sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}
382
383status_t MediaCodecSource::read(
384        MediaBuffer** buffer, const ReadOptions* /* options */) {
385    Mutexed<Output>::Locked output(mOutput);
386
387    *buffer = NULL;
388    while (output->mBufferQueue.size() == 0 && !output->mEncoderReachedEOS) {
389        output.waitForCondition(output->mCond);
390    }
391    if (!output->mEncoderReachedEOS) {
392        *buffer = *output->mBufferQueue.begin();
393        output->mBufferQueue.erase(output->mBufferQueue.begin());
394        return OK;
395    }
396    return output->mErrorCode;
397}
398
// MediaBufferObserver callback: the consumer is done with a buffer handed
// out by read(); detach it from us and free it.
void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}
403
MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<IGraphicBufferConsumer> &consumer,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mSetEncoderFormat(false),
      mEncoderFormat(0),
      mEncoderDataSpace(0),
      mGraphicBufferConsumer(consumer),
      mInputBufferTimeOffsetUs(0),
      mFirstSampleSystemTimeUs(-1ll),
      mPausePending(false),
      mFirstSampleTimeUs(-1ll),
      mGeneration(0) {
    CHECK(mLooper != NULL);

    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    // In surface-input mode the codec pulls frames itself; otherwise a
    // Puller thread reads the source and feeds the encoder.
    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}
440
MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    // Tear down the codec's private looper and detach our reflector from
    // the shared looper.
    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}
447
448status_t MediaCodecSource::init() {
449    status_t err = initEncoder();
450
451    if (err != OK) {
452        releaseEncoder();
453    }
454
455    return err;
456}
457
status_t MediaCodecSource::initEncoder() {
    // Message plumbing: mReflector forwards AMessages to this object on
    // mLooper; the codec itself runs on its own "codec_looper".
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // Start with input suspended; onStart()/resume() re-enables frames.
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    Vector<AString> matchingCodecs;
    MediaCodecList::findMatchingCodecs(
            outputMIME.c_str(), true /* encoder */,
            ((mFlags & FLAG_PREFER_SOFTWARE_CODEC) ? MediaCodecList::kPreferSoftwareCodecs : 0),
            &matchingCodecs);

    // Try each matching encoder in turn until one configures successfully.
    status_t err = NO_INIT;
    for (size_t ix = 0; ix < matchingCodecs.size(); ++ix) {
        mEncoder = MediaCodec::CreateByComponentName(
                mCodecLooper, matchingCodecs[ix]);

        if (mEncoder == NULL) {
            continue;
        }

        ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
        mEncoder->setCallback(mEncoderActivityNotify);

        err = mEncoder->configure(
                    mOutputFormat,
                    NULL /* nativeWindow */,
                    NULL /* crypto */,
                    MediaCodec::CONFIGURE_FLAG_ENCODE);

        if (err == OK) {
            break;
        }
        mEncoder->release();
        mEncoder = NULL;
    }

    if (err != OK) {
        return err;
    }

    // Cache the negotiated output format for getFormat().
    mEncoder->getOutputFormat(&mOutputFormat);
    sp<MetaData> meta = new MetaData;
    convertMessageToMetaData(mOutputFormat, meta);
    mMeta.lock().set(meta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mGraphicBufferConsumer != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(
                    new PersistentSurface(NULL, mGraphicBufferConsumer));
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    // Determine the pixel format / dataspace a (non-surface) video source
    // should produce, based on the selected encoder's input format.
    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK) {
        mSetEncoderFormat = true;
        if (inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
                && usingSwReadOften) {
            // this is a SW encoder; signal source to allocate SW readable buffers
            mEncoderFormat = kDefaultSwVideoEncoderFormat;
        } else {
            mEncoderFormat = kDefaultHwVideoEncoderFormat;
        }
        if (!inputFormat->findInt32("android._dataspace", &mEncoderDataSpace)) {
            mEncoderDataSpace = kDefaultVideoEncoderDataSpace;
        }
        ALOGV("setting dataspace %#x, format %#x", mEncoderDataSpace, mEncoderFormat);
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    // Reset the output state for this encoding session.
    {
        Mutexed<Output>::Locked output(mOutput);
        output->mEncoderReachedEOS = false;
        output->mErrorCode = OK;
    }

    return OK;
}
565
566void MediaCodecSource::releaseEncoder() {
567    if (mEncoder == NULL) {
568        return;
569    }
570
571    mEncoder->release();
572    mEncoder.clear();
573}
574
575status_t MediaCodecSource::postSynchronouslyAndReturnError(
576        const sp<AMessage> &msg) {
577    sp<AMessage> response;
578    status_t err = msg->postAndAwaitResponse(&response);
579
580    if (err != OK) {
581        return err;
582    }
583
584    if (!response->findInt32("err", &err)) {
585        err = OK;
586    }
587
588    return err;
589}
590
591void MediaCodecSource::signalEOS(status_t err) {
592    bool reachedEOS = false;
593    {
594        Mutexed<Output>::Locked output(mOutput);
595        reachedEOS = output->mEncoderReachedEOS;
596        if (!reachedEOS) {
597            ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
598            // release all unread media buffers
599            for (List<MediaBuffer*>::iterator it = output->mBufferQueue.begin();
600                    it != output->mBufferQueue.end(); it++) {
601                (*it)->release();
602            }
603            output->mBufferQueue.clear();
604            output->mEncoderReachedEOS = true;
605            output->mErrorCode = err;
606            output->mCond.signal();
607
608            reachedEOS = true;
609            output.unlock();
610            releaseEncoder();
611        }
612    }
613
614    if (mStopping && reachedEOS) {
615        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
616        mPuller->stopSource();
617        ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
618        // posting reply to everyone that's waiting
619        List<sp<AReplyToken>>::iterator it;
620        for (it = mStopReplyIDQueue.begin();
621                it != mStopReplyIDQueue.end(); it++) {
622            (new AMessage)->postReply(*it);
623        }
624        mStopReplyIDQueue.clear();
625        mStopping = false;
626        ++mGeneration;
627    }
628}
629
630void MediaCodecSource::suspend() {
631    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
632    if (mEncoder != NULL) {
633        sp<AMessage> params = new AMessage;
634        params->setInt32("drop-input-frames", true);
635        mEncoder->setParameters(params);
636    }
637}
638
639void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
640    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
641    if (mEncoder != NULL) {
642        sp<AMessage> params = new AMessage;
643        params->setInt32("drop-input-frames", false);
644        if (skipFramesBeforeUs > 0) {
645            params->setInt64("skip-frames-before", skipFramesBeforeUs);
646        }
647        mEncoder->setParameters(params);
648    }
649}
650
// Moves buffers from the puller's queue into available encoder input
// slots, copying the data and bookkeeping per-sample timestamps.
status_t MediaCodecSource::feedEncoderInputBuffers() {
    MediaBuffer* mbuf = NULL;
    while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
            if (mFirstSampleSystemTimeUs < 0ll) {
                mFirstSampleSystemTimeUs = systemTime() / 1000;
                if (mPausePending) {
                    // A pause was requested before the first sample: honor
                    // it now, drop this buffer and return the input slot.
                    mPausePending = false;
                    onPause();
                    mbuf->release();
                    mAvailEncoderInputIndices.push_back(bufferIndex);
                    return OK;
                }
            }

            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }
                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<MediaCodecBuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            // NOTE(review): Puller::readBuffer() as written never yields a
            // NULL buffer on success, so this EOS path appears defensive —
            // confirm before relying on it.
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
726
// Handles kWhatStart on the looper: resumes a paused session if already
// started, otherwise performs the initial start of the source or surface
// input.
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        // Already started: treat this call as resume-from-pause.
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mPausePending) {
            // The deferred pause never took effect; just cancel it.
            mPausePending = false;
            return OK;
        }
        if (mIsVideo) {
            // Force a sync frame so the resumed stream is decodable.
            mEncoder->requestIDRFrame();
        }
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // Un-suspend the codec's surface input, optionally skipping frames
        // queued before the requested start time.
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            // Tell the source which pixel format / dataspace the encoder
            // expects (decided in initEncoder()).
            if (meta == NULL) {
                meta = new MetaData;
            }
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}
784
785void MediaCodecSource::onPause() {
786    if (mFlags & FLAG_USE_SURFACE_INPUT) {
787        suspend();
788    } else {
789        CHECK(mPuller != NULL);
790        mPuller->pause();
791    }
792}
793
794void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
795    switch (msg->what()) {
796    case kWhatPullerNotify:
797    {
798        int32_t eos = 0;
799        if (msg->findInt32("eos", &eos) && eos) {
800            ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
801            signalEOS();
802            break;
803        }
804
805        if (mEncoder == NULL) {
806            ALOGV("got msg '%s' after encoder shutdown.", msg->debugString().c_str());
807            break;
808        }
809
810        feedEncoderInputBuffers();
811        break;
812    }
813    case kWhatEncoderActivity:
814    {
815        if (mEncoder == NULL) {
816            break;
817        }
818
819        int32_t cbID;
820        CHECK(msg->findInt32("callbackID", &cbID));
821        if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
822            int32_t index;
823            CHECK(msg->findInt32("index", &index));
824
825            mAvailEncoderInputIndices.push_back(index);
826            feedEncoderInputBuffers();
827        } else if (cbID == MediaCodec::CB_OUTPUT_FORMAT_CHANGED) {
828            status_t err = mEncoder->getOutputFormat(&mOutputFormat);
829            if (err != OK) {
830                signalEOS(err);
831                break;
832            }
833            sp<MetaData> meta = new MetaData;
834            convertMessageToMetaData(mOutputFormat, meta);
835            mMeta.lock().set(meta);
836        } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
837            int32_t index;
838            size_t offset;
839            size_t size;
840            int64_t timeUs;
841            int32_t flags;
842
843            CHECK(msg->findInt32("index", &index));
844            CHECK(msg->findSize("offset", &offset));
845            CHECK(msg->findSize("size", &size));
846            CHECK(msg->findInt64("timeUs", &timeUs));
847            CHECK(msg->findInt32("flags", &flags));
848
849            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
850                mEncoder->releaseOutputBuffer(index);
851                signalEOS();
852                break;
853            }
854
855            sp<MediaCodecBuffer> outbuf;
856            status_t err = mEncoder->getOutputBuffer(index, &outbuf);
857            if (err != OK || outbuf == NULL) {
858                signalEOS();
859                break;
860            }
861
862            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
863            mbuf->add_ref();
864
865            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
866                if (mIsVideo) {
867                    int64_t decodingTimeUs;
868                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
869                        if (mFirstSampleSystemTimeUs < 0ll) {
870                            mFirstSampleSystemTimeUs = systemTime() / 1000;
871                            if (mPausePending) {
872                                mPausePending = false;
873                                onPause();
874                                mbuf->release();
875                                break;
876                            }
877                        }
878                        // Timestamp offset is already adjusted in GraphicBufferSource.
879                        // GraphicBufferSource is supposed to discard samples
880                        // queued before start, and offset timeUs by start time
881                        CHECK_GE(timeUs, 0ll);
882                        // TODO:
883                        // Decoding time for surface source is unavailable,
884                        // use presentation time for now. May need to move
885                        // this logic into MediaCodec.
886                        decodingTimeUs = timeUs;
887                    } else {
888                        CHECK(!mDecodingTimeQueue.empty());
889                        decodingTimeUs = *(mDecodingTimeQueue.begin());
890                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
891                    }
892                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
893
894                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
895                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
896                } else {
897                    int64_t driftTimeUs = 0;
898#if DEBUG_DRIFT_TIME
899                    CHECK(!mDriftTimeQueue.empty());
900                    driftTimeUs = *(mDriftTimeQueue.begin());
901                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
902                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
903#endif // DEBUG_DRIFT_TIME
904                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
905                            timeUs, timeUs / 1E6, driftTimeUs);
906                }
907                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
908            } else {
909                mbuf->meta_data()->setInt64(kKeyTime, 0ll);
910                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
911            }
912            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
913                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
914            }
915            memcpy(mbuf->data(), outbuf->data(), outbuf->size());
916            mbuf->setObserver(this);
917
918            {
919                Mutexed<Output>::Locked output(mOutput);
920                output->mBufferQueue.push_back(mbuf);
921                output->mCond.signal();
922            }
923
924            mEncoder->releaseOutputBuffer(index);
925       } else if (cbID == MediaCodec::CB_ERROR) {
926            status_t err;
927            CHECK(msg->findInt32("err", &err));
928            ALOGE("Encoder (%s) reported error : 0x%x",
929                    mIsVideo ? "video" : "audio", err);
930            signalEOS();
931       }
932       break;
933    }
934    case kWhatStart:
935    {
936        sp<AReplyToken> replyID;
937        CHECK(msg->senderAwaitsResponse(&replyID));
938
939        sp<RefBase> obj;
940        CHECK(msg->findObject("meta", &obj));
941        MetaData *params = static_cast<MetaData *>(obj.get());
942
943        sp<AMessage> response = new AMessage;
944        response->setInt32("err", onStart(params));
945        response->postReply(replyID);
946        break;
947    }
948    case kWhatStop:
949    {
950        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");
951
952        sp<AReplyToken> replyID;
953        CHECK(msg->senderAwaitsResponse(&replyID));
954
955        if (mOutput.lock()->mEncoderReachedEOS) {
956            // if we already reached EOS, reply and return now
957            ALOGI("encoder (%s) already stopped",
958                    mIsVideo ? "video" : "audio");
959            (new AMessage)->postReply(replyID);
960            break;
961        }
962
963        mStopReplyIDQueue.push_back(replyID);
964        if (mStopping) {
965            // nothing to do if we're already stopping, reply will be posted
966            // to all when we're stopped.
967            break;
968        }
969
970        mStopping = true;
971
972        // if using surface, signal source EOS and wait for EOS to come back.
973        // otherwise, stop puller (which also clears the input buffer queue)
974        // and wait for the EOS message. We cannot call source->stop() because
975        // the encoder may still be processing input buffers.
976        if (mFlags & FLAG_USE_SURFACE_INPUT) {
977            mEncoder->signalEndOfInputStream();
978        } else {
979            mPuller->stop();
980        }
981
982        // complete stop even if encoder/puller stalled
983        sp<AMessage> timeoutMsg = new AMessage(kWhatStopStalled, mReflector);
984        timeoutMsg->setInt32("generation", mGeneration);
985        timeoutMsg->post(kStopTimeoutUs);
986        break;
987    }
988
989    case kWhatStopStalled:
990    {
991        int32_t generation;
992        CHECK(msg->findInt32("generation", &generation));
993        if (generation != mGeneration) {
994             break;
995        }
996
997        if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
998            ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
999            mPuller->interruptSource();
1000            ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
1001        }
1002        signalEOS();
1003    }
1004
1005    case kWhatPause:
1006    {
1007        if (mFirstSampleSystemTimeUs < 0) {
1008            mPausePending = true;
1009        } else {
1010            onPause();
1011        }
1012        break;
1013    }
1014    case kWhatSetInputBufferTimeOffset:
1015    {
1016        sp<AReplyToken> replyID;
1017        CHECK(msg->senderAwaitsResponse(&replyID));
1018        status_t err = OK;
1019        CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));
1020
1021        // Propagate the timestamp offset to GraphicBufferSource.
1022        if (mFlags & FLAG_USE_SURFACE_INPUT) {
1023            sp<AMessage> params = new AMessage;
1024            params->setInt64("time-offset-us", mInputBufferTimeOffsetUs);
1025            err = mEncoder->setParameters(params);
1026        }
1027
1028        sp<AMessage> response = new AMessage;
1029        response->setInt32("err", err);
1030        response->postReply(replyID);
1031        break;
1032    }
1033    case kWhatGetFirstSampleSystemTimeUs:
1034    {
1035        sp<AReplyToken> replyID;
1036        CHECK(msg->senderAwaitsResponse(&replyID));
1037
1038        sp<AMessage> response = new AMessage;
1039        response->setInt64("time-us", mFirstSampleSystemTimeUs);
1040        response->postReply(replyID);
1041        break;
1042    }
1043    default:
1044        TRESPASS();
1045    }
1046}
1047
1048} // namespace android
1049