MediaCodecSource.cpp revision 3b0da19c78f6a61ed343a07f2448b94faafe4c02
/*
 * Copyright 2014, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaCodecSource"
#define DEBUG_DRIFT_TIME 0

#include <inttypes.h>

#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/Utils.h>

namespace android {

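// Puller runs on its own looper and repeatedly read()s the wrapped
// MediaSource, forwarding each MediaBuffer (or NULL on EOS/error) to
// MediaCodecSource via the "accessUnit" notify message.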
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    void stop();

    void pause();
    void resume();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatPause,
        kWhatResume,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;
    sp<ALooper> mLooper;
    int32_t mPullGeneration;
    bool mIsAudio;
    bool mPaused;
    bool mReachedEOS;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};

MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
    : mSource(source),
      mLooper(new ALooper()),
      mPullGeneration(0),
      mIsAudio(false),
      mPaused(false),
      mReachedEOS(false) {
    sp<MetaData> meta = source->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    mIsAudio = !strncasecmp(mime, "audio/", 6);

    mLooper->setName("pull_looper");
}

MediaCodecSource::Puller::~Puller() {
    mLooper->unregisterHandler(id());
    mLooper->stop();
}

status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
        const sp<AMessage> &notify) {
    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
    mLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);
    mLooper->registerHandler(this);
    mNotify = notify;

    sp<AMessage> msg = new AMessage(kWhatStart, this);
    msg->setObject("meta", meta);
    return postSynchronouslyAndReturnError(msg);
}

void MediaCodecSource::Puller::stop() {
    // Stop the source from the caller's thread instead of the puller's
    // looper. mSource->stop() is thread-safe, and doing it outside the
    // puller's looper lets us stop even if the source gets stuck: if the
    // source blocks in read(), the looper would never get to process the
    // stop(), which could lead to an ANR.

    ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
    mSource->stop();
    ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");

    (new AMessage(kWhatStop, this))->post();
}

void MediaCodecSource::Puller::pause() {
    (new AMessage(kWhatPause, this))->post();
}

void MediaCodecSource::Puller::resume() {
    (new AMessage(kWhatResume, this))->post();
}

void MediaCodecSource::Puller::schedulePull() {
    sp<AMessage> msg = new AMessage(kWhatPull, this);
    msg->setInt32("generation", mPullGeneration);
    msg->post();
}

void MediaCodecSource::Puller::handleEOS() {
    if (!mReachedEOS) {
        ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
        mReachedEOS = true;
        sp<AMessage> notify = mNotify->dup();
        notify->setPointer("accessUnit", NULL);
        notify->post();
    }
}

void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            mReachedEOS = false;

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            ++mPullGeneration;

            handleEOS();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                break;
            }

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                msg->post();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                handleEOS();
            } else {
                sp<AMessage> notify = mNotify->dup();

                notify->setPointer("accessUnit", mbuf);
                notify->post();

                msg->post();
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }

        default:
            TRESPASS();
    }
}

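// Factory: constructs a MediaCodecSource and initializes its encoder;
// returns NULL if initialization fails.
//
// Typical client usage (illustrative sketch only; the looper/source setup
// and the format keys beyond "mime" are assumptions, not taken from this
// file):
//
//   sp<ALooper> looper = new ALooper;
//   looper->setName("codec_source_looper");
//   looper->start();
//
//   sp<AMessage> format = new AMessage;
//   format->setString("mime", "video/avc");
//   // ... width/height/bitrate/frame-rate etc. as required by the codec ...
//
//   sp<MediaCodecSource> encoder =
//           MediaCodecSource::Create(looper, format, videoSource /* assumed */);
//   encoder->start();
//
//   MediaBuffer *buffer;
//   while (encoder->read(&buffer) == OK) {
//       // ... consume the encoded access unit ...
//       buffer->release();
//   }
//   encoder->stop();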
// static
sp<MediaCodecSource> MediaCodecSource::Create(
        const sp<ALooper> &looper,
        const sp<AMessage> &format,
        const sp<MediaSource> &source,
        uint32_t flags) {
    sp<MediaCodecSource> mediaSource =
            new MediaCodecSource(looper, format, source, flags);

    if (mediaSource->init() == OK) {
        return mediaSource;
    }
    return NULL;
}

status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
    msg->setObject("meta", params);
    return postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    status_t err = postSynchronouslyAndReturnError(msg);

    // mPuller->stop() needs to be done outside MediaCodecSource's looper,
    // as it contains a synchronous call to stop the underlying MediaSource,
    // which often waits for all outstanding MediaBuffers to return, but
    // MediaBuffers are only returned when the MediaCodecSource looper gets
    // to process them.

    if (mPuller != NULL) {
        ALOGI("puller (%s) stopping", mIsVideo ? "video" : "audio");
        mPuller->stop();
        ALOGI("puller (%s) stopped", mIsVideo ? "video" : "audio");
    }

    return err;
}

status_t MediaCodecSource::pause() {
    (new AMessage(kWhatPause, mReflector))->post();
    return OK;
}

sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}

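// Blocking read: waits until an encoded buffer is available or the encoder
// has reached EOS, then hands ownership of the MediaBuffer to the caller.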
status_t MediaCodecSource::read(
        MediaBuffer** buffer, const ReadOptions* /* options */) {
    Mutex::Autolock autolock(mOutputBufferLock);

    *buffer = NULL;
    while (mOutputBufferQueue.size() == 0 && !mEncoderReachedEOS) {
        mOutputBufferCond.wait(mOutputBufferLock);
    }
    if (!mEncoderReachedEOS) {
        *buffer = *mOutputBufferQueue.begin();
        mOutputBufferQueue.erase(mOutputBufferQueue.begin());
        return OK;
    }
    return mErrorCode;
}

void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}

MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mFirstSampleTimeUs(-1ll),
      mEncoderReachedEOS(false),
      mErrorCode(OK) {
    CHECK(mLooper != NULL);

    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}

MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}

status_t MediaCodecSource::init() {
    status_t err = initEncoder();

    if (err != OK) {
        releaseEncoder();
    }

    return err;
}

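// Creates the MediaCodec encoder on its own looper, configures it with
// mOutputFormat in asynchronous (callback) mode, optionally creates the
// input surface, and starts it.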
status_t MediaCodecSource::initEncoder() {
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_METADATA_INPUT) {
        mOutputFormat->setInt32("store-metadata-in-buffers", 1);
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    mEncoder = MediaCodec::CreateByType(
            mCodecLooper, outputMIME.c_str(), true /* encoder */);

    if (mEncoder == NULL) {
        return NO_INIT;
    }

    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

    mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
    mEncoder->setCallback(mEncoderActivityNotify);

    status_t err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);

    if (err != OK) {
        return err;
    }

    mEncoder->getOutputFormat(&mOutputFormat);
    convertMessageToMetaData(mOutputFormat, mMeta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        err = mEncoder->createInputSurface(&mGraphicBufferProducer);

        if (err != OK) {
            return err;
        }
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    mEncoderReachedEOS = false;
    mErrorCode = OK;

    return OK;
}

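// Releases the MediaCodec instance and drops any input buffers that were
// queued but never fed to the encoder.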
void MediaCodecSource::releaseEncoder() {
    if (mEncoder == NULL) {
        return;
    }

    mEncoder->release();
    mEncoder.clear();

    while (!mInputBufferQueue.empty()) {
        MediaBuffer *mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());
        if (mbuf != NULL) {
            mbuf->release();
        }
    }
}

status_t MediaCodecSource::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

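// Marks the encoder as having reached EOS (with the given error code),
// releases unread output buffers, wakes any blocked read(), tears down the
// encoder, and replies to every caller waiting on stop().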
void MediaCodecSource::signalEOS(status_t err) {
    if (!mEncoderReachedEOS) {
        ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
        {
            Mutex::Autolock autoLock(mOutputBufferLock);
            // release all unread media buffers
            for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
                    it != mOutputBufferQueue.end(); it++) {
                (*it)->release();
            }
            mOutputBufferQueue.clear();
            mEncoderReachedEOS = true;
            mErrorCode = err;
            mOutputBufferCond.signal();
        }

        releaseEncoder();
    }
    if (mStopping && mEncoderReachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<sp<AReplyToken>>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
    }
}

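// suspend()/resume() apply only to surface input: they toggle the encoder's
// "drop-input-frames" parameter, and resume() can additionally tell the
// codec to skip frames queued before a given start time.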
void MediaCodecSource::suspend() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", true);
        mEncoder->setParameters(params);
    }
}

void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", false);
        if (skipFramesBeforeUs > 0) {
            params->setInt64("skip-frames-before", skipFramesBeforeUs);
        }
        mEncoder->setParameters(params);
    }
}

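// Copies queued MediaBuffers from the puller into available encoder input
// buffers. A NULL entry in the queue marks end-of-stream and is submitted
// with BUFFER_FLAG_EOS.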
status_t MediaCodecSource::feedEncoderInputBuffers() {
    while (!mInputBufferQueue.empty()
            && !mAvailEncoderInputIndices.empty()) {
        MediaBuffer* mbuf = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<ABuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}

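// Handles kWhatStart on the looper: either resumes an already-started
// source, or performs the initial start (resuming the input surface, or
// starting the puller with a notify message pointing back here).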
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(params, notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}

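// Looper message handler: receives access units from the puller
// (kWhatPullerNotify), MediaCodec callbacks (kWhatEncoderActivity), and
// the start/stop/pause requests posted by the public API.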
void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
    case kWhatPullerNotify:
    {
        MediaBuffer *mbuf;
        CHECK(msg->findPointer("accessUnit", (void**)&mbuf));

        if (mbuf == NULL) {
            ALOGV("puller (%s) reached EOS",
                    mIsVideo ? "video" : "audio");
            signalEOS();
        }

        if (mEncoder == NULL) {
            ALOGV("got msg '%s' after encoder shutdown.",
                  msg->debugString().c_str());

            if (mbuf != NULL) {
                mbuf->release();
            }

            break;
        }

        mInputBufferQueue.push_back(mbuf);

        feedEncoderInputBuffers();

        break;
    }
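    // Encoder callbacks: input buffers are fed as indices become available;
    // output buffers are copied into MediaBuffers, stamped with timestamps
    // (plus decoding time for video, drift time for audio), and queued for
    // read(); codec-config and sync-frame flags are translated into metadata.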
    case kWhatEncoderActivity:
    {
        if (mEncoder == NULL) {
            break;
        }

        int32_t cbID;
        CHECK(msg->findInt32("callbackID", &cbID));
        if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
            int32_t index;
            CHECK(msg->findInt32("index", &index));

            mAvailEncoderInputIndices.push_back(index);
            feedEncoderInputBuffers();
        } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
            int32_t index;
            size_t offset;
            size_t size;
            int64_t timeUs;
            int32_t flags;

            CHECK(msg->findInt32("index", &index));
            CHECK(msg->findSize("offset", &offset));
            CHECK(msg->findSize("size", &size));
            CHECK(msg->findInt64("timeUs", &timeUs));
            CHECK(msg->findInt32("flags", &flags));

            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                mEncoder->releaseOutputBuffer(index);
                signalEOS();
                break;
            }

            sp<ABuffer> outbuf;
            status_t err = mEncoder->getOutputBuffer(index, &outbuf);
            if (err != OK || outbuf == NULL) {
                signalEOS();
                break;
            }

            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());

            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                if (mIsVideo) {
                    int64_t decodingTimeUs;
                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
                        // GraphicBufferSource is supposed to discard samples
                        // queued before start, and offset timeUs by start time
                        CHECK_GE(timeUs, 0ll);
                        // TODO:
                        // Decoding time for surface source is unavailable,
                        // use presentation time for now. May need to move
                        // this logic into MediaCodec.
                        decodingTimeUs = timeUs;
                    } else {
                        CHECK(!mDecodingTimeQueue.empty());
                        decodingTimeUs = *(mDecodingTimeQueue.begin());
                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                    }
                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);

                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                } else {
                    int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                    CHECK(!mDriftTimeQueue.empty());
                    driftTimeUs = *(mDriftTimeQueue.begin());
                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                            timeUs, timeUs / 1E6, driftTimeUs);
                }
                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
            } else {
                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
            }
            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
            }
            mbuf->setObserver(this);
            mbuf->add_ref();

            {
                Mutex::Autolock autoLock(mOutputBufferLock);
                mOutputBufferQueue.push_back(mbuf);
                mOutputBufferCond.signal();
            }

            mEncoder->releaseOutputBuffer(index);
        } else if (cbID == MediaCodec::CB_ERROR) {
            status_t err;
            CHECK(msg->findInt32("err", &err));
            ALOGE("Encoder (%s) reported error : 0x%x",
                    mIsVideo ? "video" : "audio", err);
            signalEOS();
        }
        break;
    }
    case kWhatStart:
    {
        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        sp<RefBase> obj;
        CHECK(msg->findObject("meta", &obj));
        MetaData *params = static_cast<MetaData *>(obj.get());

        sp<AMessage> response = new AMessage;
        response->setInt32("err", onStart(params));
        response->postReply(replyID);
        break;
    }
    case kWhatStop:
    {
        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        if (mEncoderReachedEOS) {
            // if we already reached EOS, reply and return now
            ALOGI("encoder (%s) already stopped",
                    mIsVideo ? "video" : "audio");
            (new AMessage)->postReply(replyID);
            break;
        }

        mStopReplyIDQueue.push_back(replyID);
        if (mStopping) {
            // nothing to do if we're already stopping; the reply will be
            // posted to everyone once we're stopped.
            break;
        }

        mStopping = true;

        // if using surface input, signal source EOS and wait for EOS to
        // come back. otherwise, release the encoder and post EOS if we
        // haven't done so already.
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            mEncoder->signalEndOfInputStream();
        } else {
            signalEOS();
        }
        break;
    }
    case kWhatPause:
    {
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            suspend();
        } else {
            CHECK(mPuller != NULL);
            mPuller->pause();
        }
        break;
    }
    default:
        TRESPASS();
    }
}

} // namespace android