// MediaCodecSource.cpp, revision 1099188151eb63af24ecf542b58d4257bbb8236a
/*
 * Copyright 2014, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaCodecSource"
#define DEBUG_DRIFT_TIME 0

#include <inttypes.h>

#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/Utils.h>

namespace android {

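// Puller pulls MediaBuffers from a MediaSource on its own looper
// ("pull_looper") and hands each buffer to MediaCodecSource through the
// "accessUnit" notify message. mPullGeneration tags every scheduled pull so
// that pulls posted before a stop() are ignored once the generation changes.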
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    void stop();

    void pause();
    void resume();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatPause,
        kWhatResume,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;
    sp<ALooper> mLooper;
    int32_t mPullGeneration;
    bool mIsAudio;
    bool mPaused;
    bool mReachedEOS;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};

MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
    : mSource(source),
      mLooper(new ALooper()),
      mPullGeneration(0),
      mIsAudio(false),
      mPaused(false),
      mReachedEOS(false) {
    sp<MetaData> meta = source->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    mIsAudio = !strncasecmp(mime, "audio/", 6);

    mLooper->setName("pull_looper");
}

MediaCodecSource::Puller::~Puller() {
    mLooper->unregisterHandler(id());
    mLooper->stop();
}

status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
        const sp<AMessage> &notify) {
    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
    mLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);
    mLooper->registerHandler(this);
    mNotify = notify;

    sp<AMessage> msg = new AMessage(kWhatStart, id());
    msg->setObject("meta", meta);
    return postSynchronouslyAndReturnError(msg);
}

void MediaCodecSource::Puller::stop() {
    // Stop the source from the caller's thread instead of the puller's
    // looper. mSource->stop() is thread-safe; doing it outside the puller's
    // looper lets us at least stop the source even if it gets stuck.
    // If the source gets stuck in read(), the looper would never get to
    // process the stop(), which could lead to an ANR.

    ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
    mSource->stop();
    ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");

    (new AMessage(kWhatStop, id()))->post();
}

void MediaCodecSource::Puller::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void MediaCodecSource::Puller::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

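// Post a pull request tagged with the current generation. The kWhatPull
// handler re-posts itself after each read, so this kicks off a pull loop
// that keeps running until stop() bumps mPullGeneration or the source
// reports an error or EOS.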
void MediaCodecSource::Puller::schedulePull() {
    sp<AMessage> msg = new AMessage(kWhatPull, id());
    msg->setInt32("generation", mPullGeneration);
    msg->post();
}

void MediaCodecSource::Puller::handleEOS() {
    if (!mReachedEOS) {
        ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
        mReachedEOS = true;
        sp<AMessage> notify = mNotify->dup();
        notify->setPointer("accessUnit", NULL);
        notify->post();
    }
}

void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            mReachedEOS = false;

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            ++mPullGeneration;

            handleEOS();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                break;
            }

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                msg->post();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                handleEOS();
            } else {
                sp<AMessage> notify = mNotify->dup();

                notify->setPointer("accessUnit", mbuf);
                notify->post();

                msg->post();
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }

        default:
            TRESPASS();
    }
}

// static
sp<MediaCodecSource> MediaCodecSource::Create(
        const sp<ALooper> &looper,
        const sp<AMessage> &format,
        const sp<MediaSource> &source,
        uint32_t flags) {
    sp<MediaCodecSource> mediaSource =
            new MediaCodecSource(looper, format, source, flags);

    if (mediaSource->init() == OK) {
        return mediaSource;
    }
    return NULL;
}
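
// Typical client usage (a minimal sketch; "looper" and "source" stand for an
// already-started ALooper and a configured MediaSource such as a CameraSource,
// and the exact set of required format keys depends on the codec and flags):
//
//     sp<AMessage> format = new AMessage;
//     format->setString("mime", "video/avc");
//     format->setInt32("width", 1280);
//     format->setInt32("height", 720);
//     format->setInt32("bitrate", 2000000);
//     format->setInt32("frame-rate", 30);
//     format->setInt32("i-frame-interval", 1);
//
//     sp<MediaCodecSource> encoder =
//             MediaCodecSource::Create(looper, format, source);
//     if (encoder != NULL) {
//         encoder->start();  // then hand it to a writer, e.g. MPEG4Writer
//     }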

status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id());
    msg->setObject("meta", params);
    return postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id());
    status_t err = postSynchronouslyAndReturnError(msg);

    // mPuller->stop() needs to be done outside MediaCodecSource's looper,
    // as it contains a synchronous call to stop the underlying MediaSource,
    // which often waits for all outstanding MediaBuffers to return, but
    // MediaBuffers are only returned when MediaCodecSource looper gets
    // to process them.

    if (mPuller != NULL) {
        ALOGI("puller (%s) stopping", mIsVideo ? "video" : "audio");
        mPuller->stop();
        ALOGI("puller (%s) stopped", mIsVideo ? "video" : "audio");
    }

    return err;
}

status_t MediaCodecSource::pause() {
    (new AMessage(kWhatPause, mReflector->id()))->post();
    return OK;
}

sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}

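// Blocks the calling thread (typically the writer's thread) until an encoded
// buffer is available or the encoder has reached EOS; once EOS is reached,
// the error code recorded by signalEOS() is returned instead.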
status_t MediaCodecSource::read(
        MediaBuffer** buffer, const ReadOptions* /* options */) {
    Mutex::Autolock autolock(mOutputBufferLock);

    *buffer = NULL;
    while (mOutputBufferQueue.size() == 0 && !mEncoderReachedEOS) {
        mOutputBufferCond.wait(mOutputBufferLock);
    }
    if (!mEncoderReachedEOS) {
        *buffer = *mOutputBufferQueue.begin();
        mOutputBufferQueue.erase(mOutputBufferQueue.begin());
        return OK;
    }
    return mErrorCode;
}

void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}

MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mFirstSampleTimeUs(-1ll),
      mEncoderReachedEOS(false),
      mErrorCode(OK) {
    CHECK(mLooper != NULL);

    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}

MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}

status_t MediaCodecSource::init() {
    status_t err = initEncoder();

    if (err != OK) {
        releaseEncoder();
    }

    return err;
}

status_t MediaCodecSource::initEncoder() {
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_METADATA_INPUT) {
        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    mEncoder = MediaCodec::CreateByType(
            mCodecLooper, outputMIME.c_str(), true /* encoder */);

    if (mEncoder == NULL) {
        return NO_INIT;
    }

    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

    status_t err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);

    if (err != OK) {
        return err;
    }

    mEncoder->getOutputFormat(&mOutputFormat);
    convertMessageToMetaData(mOutputFormat, mMeta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        err = mEncoder->createInputSurface(&mGraphicBufferProducer);

        if (err != OK) {
            return err;
        }
    }

    mEncoderActivityNotify = new AMessage(
            kWhatEncoderActivity, mReflector->id());
    mEncoder->setCallback(mEncoderActivityNotify);

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    mEncoderReachedEOS = false;
    mErrorCode = OK;

    return OK;
}

void MediaCodecSource::releaseEncoder() {
    if (mEncoder == NULL) {
        return;
    }

    mEncoder->release();
    mEncoder.clear();

    while (!mInputBufferQueue.empty()) {
        MediaBuffer *mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());
        if (mbuf != NULL) {
            mbuf->release();
        }
    }
}

status_t MediaCodecSource::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

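// Marks the encoder as having reached EOS with the given error code: drops
// any encoded buffers the client never read, wakes up a blocked read(),
// releases the encoder, and finally replies to every pending stop() call.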
void MediaCodecSource::signalEOS(status_t err) {
    if (!mEncoderReachedEOS) {
        ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
        {
            Mutex::Autolock autoLock(mOutputBufferLock);
            // release all unread media buffers
            for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
                    it != mOutputBufferQueue.end(); it++) {
                (*it)->release();
            }
            mOutputBufferQueue.clear();
            mEncoderReachedEOS = true;
            mErrorCode = err;
            mOutputBufferCond.signal();
        }

        releaseEncoder();
    }
    if (mStopping && mEncoderReachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<uint32_t>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
    }
}

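// suspend() and resume() only apply in surface-input mode: they toggle the
// encoder's "drop-input-frames" parameter so frames queued on the input
// surface are discarded while suspended, and resume() can additionally ask
// the encoder to skip frames stamped before skipFramesBeforeUs.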
void MediaCodecSource::suspend() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", true);
        mEncoder->setParameters(params);
    }
}

void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", false);
        if (skipFramesBeforeUs > 0) {
            params->setInt64("skip-frames-before", skipFramesBeforeUs);
        }
        mEncoder->setParameters(params);
    }
}

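// Pairs queued MediaBuffers with available encoder input buffer indices:
// copies each buffer's data into the codec input buffer, records the decoding
// time (video) or drift time (audio, DEBUG_DRIFT_TIME builds only) for the
// matching output, and queues the buffer to the encoder. A NULL MediaBuffer
// in the queue is turned into an input buffer flagged BUFFER_FLAG_EOS.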
status_t MediaCodecSource::feedEncoderInputBuffers() {
    while (!mInputBufferQueue.empty()
            && !mAvailEncoderInputIndices.empty()) {
        MediaBuffer* mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<ABuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}

status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<AMessage> notify = new AMessage(
                kWhatPullerNotify, mReflector->id());
        err = mPuller->start(params, notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}

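// Everything below runs on mLooper via mReflector: kWhatPullerNotify carries
// buffers (or EOS) from the Puller, kWhatEncoderActivity carries MediaCodec's
// callback notifications (CB_INPUT_AVAILABLE, CB_OUTPUT_AVAILABLE, CB_ERROR)
// dispatched on "callbackID", and kWhatStart/kWhatStop/kWhatPause serialize
// the client-facing state changes.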
void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
    case kWhatPullerNotify:
    {
        MediaBuffer *mbuf;
        CHECK(msg->findPointer("accessUnit", (void**)&mbuf));

        if (mbuf == NULL) {
            ALOGV("puller (%s) reached EOS",
                    mIsVideo ? "video" : "audio");
            signalEOS();
        }

        if (mEncoder == NULL) {
            ALOGV("got msg '%s' after encoder shutdown.",
                  msg->debugString().c_str());

            if (mbuf != NULL) {
                mbuf->release();
            }

            break;
        }

        mInputBufferQueue.push_back(mbuf);

        feedEncoderInputBuffers();

        break;
    }
    case kWhatEncoderActivity:
    {
        if (mEncoder == NULL) {
            break;
        }

        int32_t cbID;
        CHECK(msg->findInt32("callbackID", &cbID));
        if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
            int32_t index;
            CHECK(msg->findInt32("index", &index));

            mAvailEncoderInputIndices.push_back(index);
            feedEncoderInputBuffers();
        } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
            int32_t index;
            size_t offset;
            size_t size;
            int64_t timeUs;
            int32_t flags;
            native_handle_t* handle = NULL;

            CHECK(msg->findInt32("index", &index));
            CHECK(msg->findSize("offset", &offset));
            CHECK(msg->findSize("size", &size));
            CHECK(msg->findInt64("timeUs", &timeUs));
            CHECK(msg->findInt32("flags", &flags));

            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                mEncoder->releaseOutputBuffer(index);
                signalEOS();
                break;
            }

            sp<ABuffer> outbuf;
            status_t err = mEncoder->getOutputBuffer(index, &outbuf);
            if (err != OK || outbuf == NULL) {
                signalEOS();
                break;
            }

            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());

            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                if (mIsVideo) {
                    int64_t decodingTimeUs;
                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
                        // GraphicBufferSource is supposed to discard samples
                        // queued before start, and offset timeUs by start time
                        CHECK_GE(timeUs, 0ll);
                        // TODO:
                        // Decoding time for surface source is unavailable,
                        // use presentation time for now. May need to move
                        // this logic into MediaCodec.
                        decodingTimeUs = timeUs;
                    } else {
                        CHECK(!mDecodingTimeQueue.empty());
                        decodingTimeUs = *(mDecodingTimeQueue.begin());
                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                    }
                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);

                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                } else {
                    int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                    CHECK(!mDriftTimeQueue.empty());
                    driftTimeUs = *(mDriftTimeQueue.begin());
                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                            timeUs, timeUs / 1E6, driftTimeUs);
                }
                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
            } else {
                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
            }
            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
            }
            mbuf->setObserver(this);
            mbuf->add_ref();

            {
                Mutex::Autolock autoLock(mOutputBufferLock);
                mOutputBufferQueue.push_back(mbuf);
                mOutputBufferCond.signal();
            }

            mEncoder->releaseOutputBuffer(index);
        } else if (cbID == MediaCodec::CB_ERROR) {
            status_t err;
            CHECK(msg->findInt32("err", &err));
            ALOGE("Encoder (%s) reported error : 0x%x",
                    mIsVideo ? "video" : "audio", err);
            signalEOS();
        }
        break;
    }
    case kWhatStart:
    {
        uint32_t replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        sp<RefBase> obj;
        CHECK(msg->findObject("meta", &obj));
        MetaData *params = static_cast<MetaData *>(obj.get());

        sp<AMessage> response = new AMessage;
        response->setInt32("err", onStart(params));
        response->postReply(replyID);
        break;
    }
    case kWhatStop:
    {
        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

        uint32_t replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        if (mEncoderReachedEOS) {
            // if we already reached EOS, reply and return now
            ALOGI("encoder (%s) already stopped",
                    mIsVideo ? "video" : "audio");
            (new AMessage)->postReply(replyID);
            break;
        }

        mStopReplyIDQueue.push_back(replyID);
        if (mStopping) {
            // nothing to do if we're already stopping, reply will be posted
            // to all when we're stopped.
            break;
        }

        mStopping = true;

        // if using surface, signal source EOS and wait for EOS to come back.
        // otherwise, release encoder and post EOS if haven't done already
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            mEncoder->signalEndOfInputStream();
        } else {
            signalEOS();
        }
        break;
    }
    case kWhatPause:
    {
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            suspend();
        } else {
            CHECK(mPuller != NULL);
            mPuller->pause();
        }
        break;
    }
    default:
        TRESPASS();
    }
}

} // namespace android