// MediaCodecSource.cpp, revision f2a64852a4a48c5a3d8a08ffcda20d6884586672
/*
 * Copyright 2014, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaCodecSource"
#define DEBUG_DRIFT_TIME 0

#include <inttypes.h>

#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/Utils.h>

namespace android {

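// Puller runs on its own looper ("pull_looper") and repeatedly read()s from
// the wrapped MediaSource, forwarding each MediaBuffer (or a NULL
// "accessUnit" pointer on EOS/error) to MediaCodecSource via the notify
// message supplied to start().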
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    void stop();

    void pause();
    void resume();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatPause,
        kWhatResume,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;
    sp<ALooper> mLooper;
    int32_t mPullGeneration;
    bool mIsAudio;
    bool mPaused;
    bool mReachedEOS;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};

MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
    : mSource(source),
      mLooper(new ALooper()),
      mPullGeneration(0),
      mIsAudio(false),
      mPaused(false),
      mReachedEOS(false) {
    sp<MetaData> meta = source->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    mIsAudio = !strncasecmp(mime, "audio/", 6);

    mLooper->setName("pull_looper");
}

MediaCodecSource::Puller::~Puller() {
    mLooper->unregisterHandler(id());
    mLooper->stop();
}

status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
        const sp<AMessage> &notify) {
    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
    mLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);
    mLooper->registerHandler(this);
    mNotify = notify;

    sp<AMessage> msg = new AMessage(kWhatStart, id());
    msg->setObject("meta", meta);
    return postSynchronouslyAndReturnError(msg);
}

void MediaCodecSource::Puller::stop() {
    // Stop the source from the caller's thread instead of the puller's
    // looper. mSource->stop() is thread-safe; doing it outside the puller's
    // looper lets us stop even if the source gets stuck. If the source gets
    // stuck in read(), the looper would never get to process the stop(),
    // which could lead to an ANR.

    ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
    mSource->stop();
    ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");

    (new AMessage(kWhatStop, id()))->post();
}

void MediaCodecSource::Puller::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void MediaCodecSource::Puller::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void MediaCodecSource::Puller::schedulePull() {
    sp<AMessage> msg = new AMessage(kWhatPull, id());
    msg->setInt32("generation", mPullGeneration);
    msg->post();
}

void MediaCodecSource::Puller::handleEOS() {
    if (!mReachedEOS) {
        ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
        mReachedEOS = true;
        sp<AMessage> notify = mNotify->dup();
        notify->setPointer("accessUnit", NULL);
        notify->post();
    }
}

void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            mReachedEOS = false;

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            ++mPullGeneration;

            handleEOS();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                break;
            }

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                msg->post();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                handleEOS();
            } else {
                sp<AMessage> notify = mNotify->dup();

                notify->setPointer("accessUnit", mbuf);
                notify->post();

                msg->post();
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }

        default:
            TRESPASS();
    }
}

// static
sp<MediaCodecSource> MediaCodecSource::Create(
        const sp<ALooper> &looper,
        const sp<AMessage> &format,
        const sp<MediaSource> &source,
        uint32_t flags) {
    sp<MediaCodecSource> mediaSource =
            new MediaCodecSource(looper, format, source, flags);

    if (mediaSource->init() == OK) {
        return mediaSource;
    }
    return NULL;
}
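
// A minimal client-side usage sketch (not part of this file): create the
// source from an output format and an upstream MediaSource, then drive it
// through start()/read()/stop(). The names 'looper', 'format' and
// 'rawSource', and the exact format keys, are hypothetical placeholders.
//
//     sp<ALooper> looper = new ALooper;
//     looper->setName("codec_source_looper");
//     looper->start();
//
//     sp<AMessage> format = new AMessage;
//     format->setString("mime", "video/avc");
//     // ... plus codec-specific keys (width/height/bitrate for video,
//     // sample-rate/channel-count for audio) ...
//
//     sp<MediaCodecSource> encoderSource =
//             MediaCodecSource::Create(looper, format, rawSource, 0 /* flags */);
//     encoderSource->start(NULL /* params */);
//
//     MediaBuffer *buffer;
//     while (encoderSource->read(&buffer, NULL /* options */) == OK) {
//         // ... consume the encoded data in buffer ...
//         buffer->release();  // returned via signalBufferReturned()
//     }
//     encoderSource->stop();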

status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id());
    msg->setObject("meta", params);
    return postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id());
    status_t err = postSynchronouslyAndReturnError(msg);

    // mPuller->stop() needs to be done outside MediaCodecSource's looper,
    // as it contains a synchronous call to stop the underlying MediaSource,
    // which often waits for all outstanding MediaBuffers to return; but
    // MediaBuffers are only returned when the MediaCodecSource looper gets
    // to process them.

    if (mPuller != NULL) {
        ALOGI("puller (%s) stopping", mIsVideo ? "video" : "audio");
        mPuller->stop();
        ALOGI("puller (%s) stopped", mIsVideo ? "video" : "audio");
    }

    return err;
}

status_t MediaCodecSource::pause() {
    (new AMessage(kWhatPause, mReflector->id()))->post();
    return OK;
}

sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}

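// Blocks until an encoded buffer is available or the encoder has reached
// EOS; once EOS is reached the stored error code is returned instead of a
// buffer.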
status_t MediaCodecSource::read(
        MediaBuffer** buffer, const ReadOptions* /* options */) {
    Mutex::Autolock autolock(mOutputBufferLock);

    *buffer = NULL;
    while (mOutputBufferQueue.size() == 0 && !mEncoderReachedEOS) {
        mOutputBufferCond.wait(mOutputBufferLock);
    }
    if (!mEncoderReachedEOS) {
        *buffer = *mOutputBufferQueue.begin();
        mOutputBufferQueue.erase(mOutputBufferQueue.begin());
        return OK;
    }
    return mErrorCode;
}

void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}

MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mFirstSampleTimeUs(-1ll),
      mEncoderReachedEOS(false),
      mErrorCode(OK) {
    CHECK(mLooper != NULL);

    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}

MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}

status_t MediaCodecSource::init() {
    status_t err = initEncoder();

    if (err != OK) {
        releaseEncoder();
    }

    return err;
}

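// Creates the MediaCodec encoder on its own looper, configures it from
// mOutputFormat, optionally creates the input surface, starts it, and
// caches the codec's input/output buffer arrays.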
status_t MediaCodecSource::initEncoder() {
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_METADATA_INPUT) {
        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    mEncoder = MediaCodec::CreateByType(
            mCodecLooper, outputMIME.c_str(), true /* encoder */);

    if (mEncoder == NULL) {
        return NO_INIT;
    }

    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

    status_t err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);

    if (err != OK) {
        return err;
    }

    mEncoder->getOutputFormat(&mOutputFormat);
    convertMessageToMetaData(mOutputFormat, mMeta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        err = mEncoder->createInputSurface(&mGraphicBufferProducer);

        if (err != OK) {
            return err;
        }
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    err = mEncoder->getInputBuffers(&mEncoderInputBuffers);

    if (err != OK) {
        return err;
    }

    err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);

    if (err != OK) {
        return err;
    }

    mEncoderReachedEOS = false;
    mErrorCode = OK;

    return OK;
}

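// Releases the MediaCodec, drops any MediaBuffers still waiting in the
// input queue, and detaches MediaBuffers attached to the cached codec
// input buffers.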
void MediaCodecSource::releaseEncoder() {
    if (mEncoder == NULL) {
        return;
    }

    mEncoder->release();
    mEncoder.clear();

    while (!mInputBufferQueue.empty()) {
        MediaBuffer *mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());
        if (mbuf != NULL) {
            mbuf->release();
        }
    }

    for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) {
        sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i);
        accessUnit->setMediaBufferBase(NULL);
    }

    mEncoderInputBuffers.clear();
    mEncoderOutputBuffers.clear();
}

status_t MediaCodecSource::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

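// Marks the encoder as having reached EOS (or an error), wakes up any
// reader blocked in read(), releases the encoder, and replies to all
// pending stop() requests once stopping has completed.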
void MediaCodecSource::signalEOS(status_t err) {
    if (!mEncoderReachedEOS) {
        ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
        {
            Mutex::Autolock autoLock(mOutputBufferLock);
            // release all unread media buffers
            for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
                    it != mOutputBufferQueue.end(); it++) {
                (*it)->release();
            }
            mOutputBufferQueue.clear();
            mEncoderReachedEOS = true;
            mErrorCode = err;
            mOutputBufferCond.signal();
        }

        releaseEncoder();
    }
    if (mStopping && mEncoderReachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<uint32_t>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
    }
}

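// suspend()/resume() apply only to surface input: they toggle the encoder's
// "drop-input-frames" parameter, and resume() can additionally ask the
// encoder to skip frames queued before skipFramesBeforeUs.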
void MediaCodecSource::suspend() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", true);
        mEncoder->setParameters(params);
    }
}

void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", false);
        if (skipFramesBeforeUs > 0) {
            params->setInt64("skip-frames-before", skipFramesBeforeUs);
        }
        mEncoder->setParameters(params);
    }
}

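// Arms a single pending activity notification from the codec; work is then
// driven by the resulting kWhatEncoderActivity message.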
void MediaCodecSource::scheduleDoMoreWork() {
    if (mDoMoreWorkPending) {
        return;
    }

    mDoMoreWorkPending = true;

    if (mEncoderActivityNotify == NULL) {
        mEncoderActivityNotify = new AMessage(
                kWhatEncoderActivity, mReflector->id());
    }
    mEncoder->requestActivityNotification(mEncoderActivityNotify);
}

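// Copies queued MediaBuffers into available codec input buffers, tracking
// decoding time (video) or drift time (audio, DEBUG_DRIFT_TIME only) per
// sample, and queues an EOS input buffer when a NULL entry is reached.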
status_t MediaCodecSource::feedEncoderInputBuffers() {
    while (!mInputBufferQueue.empty()
            && !mAvailEncoderInputIndices.empty()) {
        MediaBuffer* mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            size = mbuf->size();

            memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
                   mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                mEncoderInputBuffers.itemAt(bufferIndex)->setMediaBufferBase(
                        mbuf);
            } else {
                mbuf->release();
            }
        } else {
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}

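// Dequeues up to numInput available input slots (non-surface input only)
// and up to numOutput encoded buffers, wrapping each output in a
// MediaBuffer for read(); returns ERROR_END_OF_STREAM once the codec
// emits its EOS buffer.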
status_t MediaCodecSource::doMoreWork(int32_t numInput, int32_t numOutput) {
    status_t err = OK;

    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        while (numInput-- > 0) {
            size_t bufferIndex;
            err = mEncoder->dequeueInputBuffer(&bufferIndex);

            if (err != OK) {
                break;
            }

            mAvailEncoderInputIndices.push_back(bufferIndex);
        }

        feedEncoderInputBuffers();
    }

    while (numOutput-- > 0) {
        size_t bufferIndex;
        size_t offset;
        size_t size;
        int64_t timeUs;
        uint32_t flags;
        err = mEncoder->dequeueOutputBuffer(
                &bufferIndex, &offset, &size, &timeUs, &flags);

        if (err != OK) {
            if (err == INFO_FORMAT_CHANGED) {
                continue;
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
                continue;
            }

            if (err == -EAGAIN) {
                err = OK;
            }
            break;
        }
        if (!(flags & MediaCodec::BUFFER_FLAG_EOS)) {
            sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);

            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());

            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                if (mIsVideo) {
                    int64_t decodingTimeUs;
                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
                        // GraphicBufferSource is supposed to discard samples
                        // queued before start, and offset timeUs by start time
                        CHECK_GE(timeUs, 0ll);
                        // TODO:
                        // Decoding time for surface source is unavailable,
                        // use presentation time for now. May need to move
                        // this logic into MediaCodec.
                        decodingTimeUs = timeUs;
                    } else {
                        CHECK(!mDecodingTimeQueue.empty());
                        decodingTimeUs = *(mDecodingTimeQueue.begin());
                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                    }
                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);

                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                } else {
                    int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                    CHECK(!mDriftTimeQueue.empty());
                    driftTimeUs = *(mDriftTimeQueue.begin());
                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                            timeUs, timeUs / 1E6, driftTimeUs);
                }
                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
            } else {
                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
            }
            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
            }
            mbuf->setObserver(this);
            mbuf->add_ref();

            {
                Mutex::Autolock autoLock(mOutputBufferLock);
                mOutputBufferQueue.push_back(mbuf);
                mOutputBufferCond.signal();
            }
        }

        mEncoder->releaseOutputBuffer(bufferIndex);

        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
            err = ERROR_END_OF_STREAM;
            break;
        }
    }

    return err;
}

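// Handles kWhatStart on the source's looper: if already started it just
// resumes (input surface or Puller); otherwise it resumes the surface or
// starts the Puller with the caller-supplied params.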
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        resume(startTimeUs);
        scheduleDoMoreWork();
    } else {
        CHECK(mPuller != NULL);
        sp<AMessage> notify = new AMessage(
                kWhatPullerNotify, mReflector->id());
        err = mPuller->start(params, notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}

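// Message handler on the caller-supplied looper (via mReflector): consumes
// pulled access units, services encoder activity, and serializes
// start/stop/pause requests.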
void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
    case kWhatPullerNotify:
    {
        MediaBuffer *mbuf;
        CHECK(msg->findPointer("accessUnit", (void**)&mbuf));

        if (mbuf == NULL) {
            ALOGV("puller (%s) reached EOS",
                    mIsVideo ? "video" : "audio");
            signalEOS();
        }

        if (mEncoder == NULL) {
            ALOGV("got msg '%s' after encoder shutdown.",
                  msg->debugString().c_str());

            if (mbuf != NULL) {
                mbuf->release();
            }

            break;
        }

        mInputBufferQueue.push_back(mbuf);

        feedEncoderInputBuffers();
        scheduleDoMoreWork();

        break;
    }
    case kWhatEncoderActivity:
    {
        mDoMoreWorkPending = false;

        if (mEncoder == NULL) {
            break;
        }

        int32_t numInput, numOutput;

        if (!msg->findInt32("input-buffers", &numInput)) {
            numInput = INT32_MAX;
        }
        if (!msg->findInt32("output-buffers", &numOutput)) {
            numOutput = INT32_MAX;
        }

        status_t err = doMoreWork(numInput, numOutput);

        if (err == OK) {
            scheduleDoMoreWork();
        } else {
            // reached EOS, or error
            signalEOS(err);
        }

        break;
    }
    case kWhatStart:
    {
        uint32_t replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        sp<RefBase> obj;
        CHECK(msg->findObject("meta", &obj));
        MetaData *params = static_cast<MetaData *>(obj.get());

        sp<AMessage> response = new AMessage;
        response->setInt32("err", onStart(params));
        response->postReply(replyID);
        break;
    }
    case kWhatStop:
    {
        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

        uint32_t replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        if (mEncoderReachedEOS) {
            // if we already reached EOS, reply and return now
            ALOGI("encoder (%s) already stopped",
                    mIsVideo ? "video" : "audio");
            (new AMessage)->postReply(replyID);
            break;
        }

        mStopReplyIDQueue.push_back(replyID);
        if (mStopping) {
            // nothing to do if we're already stopping; the reply will be
            // posted to all waiters once we're stopped.
            break;
        }

        mStopping = true;

        // if using surface input, signal EOS to the source and wait for EOS
        // to come back; otherwise, release the encoder and post EOS if we
        // haven't done so already.
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            mEncoder->signalEndOfInputStream();
        } else {
            signalEOS();
        }
        break;
    }
    case kWhatPause:
    {
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            suspend();
        } else {
            CHECK(mPuller != NULL);
            mPuller->pause();
        }
        break;
    }
    default:
        TRESPASS();
    }
}

} // namespace android