NuPlayer.cpp revision 1948eb3ea6eee336e8cdab9b0c693f93f5f19993
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"

#include "HTTPLiveSource.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"

#include "ATSParser.h"

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/SkipCutBuffer.h>
#include <gui/ISurfaceTexture.h>

#include "avc_utils.h"

namespace android {

////////////////////////////////////////////////////////////////////////////////

NuPlayer::NuPlayer()
    : mUIDValid(false),
      mVideoIsAVC(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mScanSourcesPending(false),
      mScanSourcesGeneration(0),
      mTimeDiscontinuityPending(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE),
      mResetInProgress(false),
      mResetPostponed(false),
      mSkipRenderingAudioUntilMediaTimeUs(-1ll),
      mSkipRenderingVideoUntilMediaTimeUs(-1ll),
      mVideoLateByUs(0ll),
      mNumFramesTotal(0ll),
      mNumFramesDropped(0ll),
      mSkipCutBuffer(NULL) {
}

NuPlayer::~NuPlayer() {
    delete mSkipCutBuffer;
    mSkipCutBuffer = NULL;
}

void NuPlayer::setUID(uid_t uid) {
    mUIDValid = true;
    mUID = uid;
}

void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
    mDriver = driver;
}

void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    msg->setObject("source", new StreamingSource(source));
    msg->post();
}

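// Heuristically decide whether a URL points at an HTTP live (HLS) stream:
// it must use http(s) and either end in ".m3u8" or contain "m3u8" anywhere
// in the URL.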
static bool IsHTTPLiveURL(const char *url) {
    if (!strncasecmp("http://", url, 7)
            || !strncasecmp("https://", url, 8)) {
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
            return true;
        }

        if (strstr(url, "m3u8")) {
            return true;
        }
    }

    return false;
}

void NuPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source;
    if (IsHTTPLiveURL(url)) {
        source = new HTTPLiveSource(url, headers, mUIDValid, mUID);
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        source = new RTSPSource(url, headers, mUIDValid, mUID);
    } else {
        source = new GenericSource(url, headers, mUIDValid, mUID);
    }

    msg->setObject("source", source);
    msg->post();
}

void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source = new GenericSource(fd, offset, length);
    msg->setObject("source", source);
    msg->post();
}

void NuPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
    sp<SurfaceTextureClient> surfaceTextureClient(surfaceTexture != NULL ?
                new SurfaceTextureClient(surfaceTexture) : NULL);
    msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient));
    msg->post();
}

void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

void NuPlayer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::resetAsync() {
    (new AMessage(kWhatReset, id()))->post();
}

void NuPlayer::seekToAsync(int64_t seekTimeUs) {
    sp<AMessage> msg = new AMessage(kWhatSeek, id());
    msg->setInt64("seekTimeUs", seekTimeUs);
    msg->post();
}

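// Returns true while |state| indicates an in-flight decoder flush; if
// |needShutdown| is non-NULL it is set to whether the decoder also has to be
// shut down once the flush completes.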
// static
bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
    switch (state) {
        case FLUSHING_DECODER:
            if (needShutdown != NULL) {
                *needShutdown = false;
            }
            return true;

        case FLUSHING_DECODER_SHUTDOWN:
            if (needShutdown != NULL) {
                *needShutdown = true;
            }
            return true;

        default:
            return false;
    }
}

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            ALOGV("kWhatSetDataSource");

            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            ALOGV("kWhatSetVideoNativeWindow");

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
            break;
        }

        case kWhatSetAudioSink:
        {
            ALOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            ALOGV("kWhatStart");

            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;

            mSource->start();

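            // If the audio format advertises encoder delay and/or padding,
            // set up a SkipCutBuffer so those samples are trimmed from the
            // decoded PCM before it reaches the audio sink.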
            sp<MetaData> meta = mSource->getFormat(true /* audio */);
            if (meta != NULL) {
                int32_t delay = 0;
                if (!meta->findInt32(kKeyEncoderDelay, &delay)) {
                    delay = 0;
                }
                int32_t padding = 0;
                if (!meta->findInt32(kKeyEncoderPadding, &padding)) {
                    padding = 0;
                }
                int32_t numchannels = 0;
                if (delay + padding) {
                    if (meta->findInt32(kKeyChannelCount, &numchannels)) {
                        size_t frameSize = numchannels * sizeof(int16_t);
                        if (mSkipCutBuffer) {
                            size_t prevbuffersize = mSkipCutBuffer->size();
                            if (prevbuffersize != 0) {
                                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbuffersize);
                            }
                            delete mSkipCutBuffer;
                        }
                        mSkipCutBuffer = new SkipCutBuffer(delay * frameSize, padding * frameSize);
                    }
                }
            }

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }

        case kWhatScanSources:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            instantiateDecoder(false, &mVideoDecoder);

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }

        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK) {
                    if (mSource->feedMoreTSData() == OK) {
                        msg->post(10000ll);
                    }
                }
            } else if (what == ACodec::kWhatEOS) {
                int32_t err;
                CHECK(codecRequest->findInt32("err", &err));

                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
                } else {
                    ALOGV("got %s decoder EOS w/ error %d",
                         audio ? "audio" : "video",
                         err);
                }

                mRenderer->queueEOS(audio, err);
            } else if (what == ACodec::kWhatFlushCompleted) {
                bool needShutdown;

                if (audio) {
                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
                    mFlushingVideo = FLUSHED;

                    mVideoLateByUs = 0;
                }

                ALOGV("decoder %s flush completed", audio ? "audio" : "video");

                if (needShutdown) {
                    ALOGV("initiating %s decoder shutdown",
                         audio ? "audio" : "video");

                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();

                    if (audio) {
                        mFlushingAudio = SHUTTING_DOWN_DECODER;
                    } else {
                        mFlushingVideo = SHUTTING_DOWN_DECODER;
                    }
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatOutputFormatChanged) {
                if (audio) {
                    int32_t numChannels;
                    CHECK(codecRequest->findInt32("channel-count", &numChannels));

                    int32_t sampleRate;
                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));

                    ALOGV("Audio output format changed to %d Hz, %d channels",
                         sampleRate, numChannels);

                    mAudioSink->close();

                    audio_output_flags_t flags;
                    int64_t durationUs;
                    // FIXME: we should handle the case where the video decoder is created after
                    // we receive the format change indication. As the code stands, we may end up
                    // selecting the deep-buffer output even though there is video, which should
                    // not be a problem as it should not prevent us from keeping A/V sync.
                    if (mVideoDecoder == NULL &&
                            mSource->getDuration(&durationUs) == OK &&
                            durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
                        flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
                    } else {
                        flags = AUDIO_OUTPUT_FLAG_NONE;
                    }

                    CHECK_EQ(mAudioSink->open(
                                sampleRate,
                                numChannels,
                                CHANNEL_MASK_USE_CHANNEL_ORDER,
                                AUDIO_FORMAT_PCM_16_BIT,
                                8 /* bufferCount */,
                                NULL,
                                NULL,
                                flags),
                             (status_t)OK);
                    mAudioSink->start();

                    mRenderer->signalAudioSinkChanged();
                } else {
                    // video

                    int32_t width, height;
                    CHECK(codecRequest->findInt32("width", &width));
                    CHECK(codecRequest->findInt32("height", &height));

                    int32_t cropLeft, cropTop, cropRight, cropBottom;
                    CHECK(codecRequest->findRect(
                                "crop",
                                &cropLeft, &cropTop, &cropRight, &cropBottom));

                    ALOGV("Video output format changed to %d x %d "
                         "(crop: %d x %d @ (%d, %d))",
                         width, height,
                         (cropRight - cropLeft + 1),
                         (cropBottom - cropTop + 1),
                         cropLeft, cropTop);

                    notifyListener(
                            MEDIA_SET_VIDEO_SIZE,
                            cropRight - cropLeft + 1,
                            cropBottom - cropTop + 1);
                }
            } else if (what == ACodec::kWhatShutdownCompleted) {
                ALOGV("%s shutdown completed", audio ? "audio" : "video");
                if (audio) {
                    mAudioDecoder.clear();

                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
                    mFlushingAudio = SHUT_DOWN;
                } else {
                    mVideoDecoder.clear();

                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
                    mFlushingVideo = SHUT_DOWN;
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatError) {
                ALOGE("Received error from %s decoder, aborting playback.",
                     audio ? "audio" : "video");

                mRenderer->queueEOS(audio, UNKNOWN_ERROR);
            } else if (what == ACodec::kWhatDrainThisBuffer) {
                renderBuffer(audio, codecRequest);
            } else {
                ALOGV("Unhandled codec notification %d.", what);
            }

            break;
        }

        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                int32_t finalResult;
                CHECK(msg->findInt32("finalResult", &finalResult));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    ALOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    ALOGE("%s track encountered an error (%d)",
                         audio ? "audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatPosition) {
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));

                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));

                if (mDriver != NULL) {
                    sp<NuPlayerDriver> driver = mDriver.promote();
                    if (driver != NULL) {
                        driver->notifyPosition(positionUs);

                        driver->notifyFrameStats(
                                mNumFramesTotal, mNumFramesDropped);
                    }
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete);

                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        case kWhatReset:
        {
            ALOGV("kWhatReset");

            if (mRenderer != NULL) {
                // There's an edge case where the renderer owns all output
                // buffers and is paused; in that case the decoder will not read
                // more input data and will never encounter the matching
                // discontinuity. To avoid this, we resume the renderer.

                if (mFlushingAudio == AWAITING_DISCONTINUITY
                        || mFlushingVideo == AWAITING_DISCONTINUITY) {
                    mRenderer->resume();
                }
            }

            if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
                // We're currently flushing; postpone the reset until that's
                // completed.

                ALOGV("postponing reset mFlushingAudio=%d, mFlushingVideo=%d",
                      mFlushingAudio, mFlushingVideo);

                mResetPostponed = true;
                break;
            }

            if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                finishReset();
                break;
            }

            mTimeDiscontinuityPending = true;

            if (mAudioDecoder != NULL) {
                flushDecoder(true /* audio */, true /* needShutdown */);
            }

            if (mVideoDecoder != NULL) {
                flushDecoder(false /* audio */, true /* needShutdown */);
            }

            mResetInProgress = true;
            break;
        }

        case kWhatSeek:
        {
            int64_t seekTimeUs;
            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));

            ALOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)",
                 seekTimeUs, seekTimeUs / 1E6);

            mSource->seekTo(seekTimeUs);

            if (mDriver != NULL) {
                sp<NuPlayerDriver> driver = mDriver.promote();
                if (driver != NULL) {
                    driver->notifySeekComplete();
                }
            }

            break;
        }

        case kWhatPause:
        {
            CHECK(mRenderer != NULL);
            mRenderer->pause();
            break;
        }

        case kWhatResume:
        {
            CHECK(mRenderer != NULL);
            mRenderer->resume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

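// Called whenever a decoder flush or shutdown completes. Once both the audio
// and the video side have reached FLUSHED or SHUT_DOWN, this pushes any
// pending time discontinuity to the renderer, resumes the decoders and then
// either finishes an in-progress reset, replays a postponed one, or rescans
// for sources that still lack a decoder.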
void NuPlayer::finishFlushIfPossible() {
    if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
        return;
    }

    if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
        return;
    }

    ALOGV("both audio and video are flushed now.");

    if (mTimeDiscontinuityPending) {
        mRenderer->signalTimeDiscontinuity();
        mTimeDiscontinuityPending = false;
    }

    if (mAudioDecoder != NULL) {
        mAudioDecoder->signalResume();
    }

    if (mVideoDecoder != NULL) {
        mVideoDecoder->signalResume();
    }

    mFlushingAudio = NONE;
    mFlushingVideo = NONE;

    if (mResetInProgress) {
        ALOGV("reset completed");

        mResetInProgress = false;
        finishReset();
    } else if (mResetPostponed) {
        (new AMessage(kWhatReset, id()))->post();
        mResetPostponed = false;
    } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
        postScanSources();
    }
}

void NuPlayer::finishReset() {
    CHECK(mAudioDecoder == NULL);
    CHECK(mVideoDecoder == NULL);

    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    mRenderer.clear();

    if (mSource != NULL) {
        mSource->stop();
        mSource.clear();
    }

    if (mDriver != NULL) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyResetComplete();
        }
    }
}

void NuPlayer::postScanSources() {
    if (mScanSourcesPending) {
        return;
    }

    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
    msg->setInt32("generation", mScanSourcesGeneration);
    msg->post();

    mScanSourcesPending = true;
}

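// Creates the audio or video decoder once the source can report a format for
// that stream. Returns -EWOULDBLOCK while the format is not yet available so
// the caller can retry from the next scan-sources pass.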
status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<MetaData> meta = mSource->getFormat(audio);

    if (meta == NULL) {
        return -EWOULDBLOCK;
    }

    if (!audio) {
        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));
        mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime);
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = audio ? new Decoder(notify) :
                       new Decoder(notify, mNativeWindow);
    looper()->registerHandler(*decoder);

    (*decoder)->configure(meta);

    int64_t durationUs;
    if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyDuration(durationUs);
        }
    }

    return OK;
}

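// Fetches the next access unit for the given stream from the source and hands
// it to the decoder through the supplied reply message. Discontinuities from
// the source trigger a decoder flush (and possibly a shutdown on a format
// change); late non-reference AVC frames are dropped before reaching the
// decoder.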
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if ((audio && IsFlushingState(mFlushingAudio))
            || (!audio && IsFlushingState(mFlushingVideo))) {
        reply->setInt32("err", INFO_DISCONTINUITY);
        reply->post();
        return OK;
    }

    sp<ABuffer> accessUnit;

    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);

        if (err == -EWOULDBLOCK) {
            return err;
        } else if (err != OK) {
            if (err == INFO_DISCONTINUITY) {
                int32_t type;
                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));

                bool formatChange =
                    (audio &&
                     (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
                    || (!audio &&
                            (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));

                bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;

                ALOGI("%s discontinuity (formatChange=%d, time=%d)",
                     audio ? "audio" : "video", formatChange, timeChange);

                if (audio) {
                    mSkipRenderingAudioUntilMediaTimeUs = -1;
                } else {
                    mSkipRenderingVideoUntilMediaTimeUs = -1;
                }

                if (timeChange) {
                    sp<AMessage> extra;
                    if (accessUnit->meta()->findMessage("extra", &extra)
                            && extra != NULL) {
                        int64_t resumeAtMediaTimeUs;
                        if (extra->findInt64(
                                    "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
                            ALOGI("suppressing rendering of %s until %lld us",
                                    audio ? "audio" : "video", resumeAtMediaTimeUs);

                            if (audio) {
                                mSkipRenderingAudioUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            } else {
                                mSkipRenderingVideoUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            }
                        }
                    }
                }

                mTimeDiscontinuityPending =
                    mTimeDiscontinuityPending || timeChange;

                if (formatChange || timeChange) {
                    flushDecoder(audio, formatChange);
                } else {
                    // This stream is unaffected by the discontinuity

                    if (audio) {
                        mFlushingAudio = FLUSHED;
                    } else {
                        mFlushingVideo = FLUSHED;
                    }

                    finishFlushIfPossible();

                    return -EWOULDBLOCK;
                }
            }

            reply->setInt32("err", err);
            reply->post();
            return OK;
        }

        if (!audio) {
            ++mNumFramesTotal;
        }

        dropAccessUnit = false;
        if (!audio
                && mVideoLateByUs > 100000ll
                && mVideoIsAVC
                && !IsAVCReferenceFrame(accessUnit)) {
            dropAccessUnit = true;
            ++mNumFramesDropped;
        }
    } while (dropAccessUnit);

    // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video");

#if 0
    int64_t mediaTimeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
    ALOGV("feeding %s input buffer at media time %.2f secs",
         audio ? "audio" : "video",
         mediaTimeUs / 1E6);
#endif

    reply->setBuffer("buffer", accessUnit);
    reply->post();

    return OK;
}

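// Forwards a decoded buffer to the renderer. While the corresponding decoder
// is being flushed the buffer is bounced straight back to it, and buffers
// whose media time precedes the requested resume point are dropped.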
void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    // ALOGV("renderBuffer %s", audio ? "audio" : "video");

    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if (IsFlushingState(audio ? mFlushingAudio : mFlushingVideo)) {
        // We're currently attempting to flush the decoder. In order
        // to complete the flush, the decoder wants all its buffers back,
        // so we don't want any output buffers it sent us (from before
        // we initiated the flush) to be stuck in the renderer's queue.

        ALOGV("we're still flushing the %s decoder, sending its output buffer"
             " right back.", audio ? "audio" : "video");

        reply->post();
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    int64_t &skipUntilMediaTimeUs =
        audio
            ? mSkipRenderingAudioUntilMediaTimeUs
            : mSkipRenderingVideoUntilMediaTimeUs;

    if (skipUntilMediaTimeUs >= 0) {
        int64_t mediaTimeUs;
        CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mediaTimeUs < skipUntilMediaTimeUs) {
            ALOGV("dropping %s buffer at time %lld as requested.",
                 audio ? "audio" : "video",
                 mediaTimeUs);

            reply->post();
            return;
        }

        skipUntilMediaTimeUs = -1;
    }

    if (audio && mSkipCutBuffer) {
        mSkipCutBuffer->submit(buffer);
    }

    mRenderer->queueBuffer(audio, buffer, reply);
}

void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
    if (mDriver == NULL) {
        return;
    }

    sp<NuPlayerDriver> driver = mDriver.promote();

    if (driver == NULL) {
        return;
    }

    driver->notifyListener(msg, ext1, ext2);
}

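// Kicks off a flush of one decoder and the matching renderer queue, optionally
// marking the decoder for shutdown once the flush completes. Updates
// mFlushingAudio/mFlushingVideo so that finishFlushIfPossible() can tell when
// both streams have finished flushing.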
void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
    if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) {
        ALOGI("flushDecoder %s without decoder present",
             audio ? "audio" : "video");
    }

    // Make sure we don't continue to scan sources until we finish flushing.
    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
    mRenderer->flush(audio);

    FlushStatus newStatus =
        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;

    if (audio) {
        CHECK(mFlushingAudio == NONE
                || mFlushingAudio == AWAITING_DISCONTINUITY);

        mFlushingAudio = newStatus;

        if (mFlushingVideo == NONE) {
            mFlushingVideo = (mVideoDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    } else {
        CHECK(mFlushingVideo == NONE
                || mFlushingVideo == AWAITING_DISCONTINUITY);

        mFlushingVideo = newStatus;

        if (mFlushingAudio == NONE) {
            mFlushingAudio = (mAudioDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    }
}

}  // namespace android
