NuPlayerRenderer.cpp revision c851b5de495169d7e9528644c2592746021bd968
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "NuPlayerRenderer"
19#include <utils/Log.h>
20
21#include "NuPlayerRenderer.h"
22
23#include <media/stagefright/foundation/ABuffer.h>
24#include <media/stagefright/foundation/ADebug.h>
25#include <media/stagefright/foundation/AMessage.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28
29#include <VideoFrameScheduler.h>
30
31#include <inttypes.h>
32
33namespace android {
34
// static
// Throttle for position reporting: notifyPosition() delivers at most one
// kWhatPosition message to the upper layer per this interval (100ms).
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
37
// Renders decoded audio/video buffers delivered via queueBuffer(), keeping
// the two streams in sync against a media-time/real-time anchor.
// |sink| receives audio data, |notify| is dup()'d for upstream notifications
// (EOS, flush-complete, position, etc.), |flags| may include FLAG_REAL_TIME
// and FLAG_OFFLOAD_AUDIO (see usages below).
// The -1 initializers on the anchor/time members mean "not established yet".
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}
66
NuPlayer::Renderer::~Renderer() {
    // Only in offload mode does the renderer tear the sink down itself;
    // in non-offload mode the sink's lifecycle is managed elsewhere
    // (presumably by the player — confirm against NuPlayer).
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}
74
75void NuPlayer::Renderer::queueBuffer(
76        bool audio,
77        const sp<ABuffer> &buffer,
78        const sp<AMessage> &notifyConsumed) {
79    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
80    msg->setInt32("audio", static_cast<int32_t>(audio));
81    msg->setBuffer("buffer", buffer);
82    msg->setMessage("notifyConsumed", notifyConsumed);
83    msg->post();
84}
85
86void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
87    CHECK_NE(finalResult, (status_t)OK);
88
89    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
90    msg->setInt32("audio", static_cast<int32_t>(audio));
91    msg->setInt32("finalResult", finalResult);
92    msg->post();
93}
94
95void NuPlayer::Renderer::flush(bool audio) {
96    {
97        Mutex::Autolock autoLock(mFlushLock);
98        if (audio) {
99            if (mFlushingAudio) {
100                return;
101            }
102            mFlushingAudio = true;
103        } else {
104            if (mFlushingVideo) {
105                return;
106            }
107            mFlushingVideo = true;
108        }
109    }
110
111    sp<AMessage> msg = new AMessage(kWhatFlush, id());
112    msg->setInt32("audio", static_cast<int32_t>(audio));
113    msg->post();
114}
115
// Resets the time anchors after a discontinuity (e.g. seek) so the next
// queued sample re-establishes them; also stops any in-progress queue sync.
// Runs on the caller's thread under mLock.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mFirstAnchorTimeMediaUs = -1;
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}
125
126void NuPlayer::Renderer::signalAudioSinkChanged() {
127    (new AMessage(kWhatAudioSinkChanged, id()))->post();
128}
129
130void NuPlayer::Renderer::signalDisableOffloadAudio() {
131    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
132}
133
134void NuPlayer::Renderer::pause() {
135    (new AMessage(kWhatPause, id()))->post();
136}
137
138void NuPlayer::Renderer::resume() {
139    (new AMessage(kWhatResume, id()))->post();
140}
141
142void NuPlayer::Renderer::setVideoFrameRate(float fps) {
143    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
144    msg->setFloat("frame-rate", fps);
145    msg->post();
146}
147
// Dispatches looper messages to their on*() handlers; everything here runs on
// the renderer's looper thread. Drain messages carry a generation number so
// that posts scheduled before a flush/pause (which bumps the generation) are
// discarded when they finally arrive.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            // Posted from fillAudioBuffer() after the offloaded stream
            // reached EOS.
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                // Stale post from before a flush/pause/offload change.
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                // Audio remains queued; figure out when to drain next.
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                // Stale post from before a flush/pause.
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Schedule the next frame (no-op if the queue is now empty).
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        default:
            // Unknown message: programming error.
            TRESPASS();
            break;
    }
}
267
268void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
269    if (mDrainAudioQueuePending || mSyncQueues || mPaused
270            || offloadingAudio()) {
271        return;
272    }
273
274    if (mAudioQueue.empty()) {
275        return;
276    }
277
278    mDrainAudioQueuePending = true;
279    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
280    msg->setInt32("generation", mAudioQueueGeneration);
281    msg->post(delayUs);
282}
283
// Arms notifyIfMediaRenderingStarted(): snapshot the current generations so
// a kWhatMediaRenderingStart notification fires once both streams render a
// sample within this generation.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}
288
289void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
290    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
291        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
292        mVideoRenderingStartGeneration = -1;
293        mAudioRenderingStartGeneration = -1;
294
295        sp<AMessage> notify = mNotify->dup();
296        notify->setInt32("what", kWhatMediaRenderingStart);
297        notify->post();
298    }
299}
300
301// static
302size_t NuPlayer::Renderer::AudioSinkCallback(
303        MediaPlayerBase::AudioSink * /* audioSink */,
304        void *buffer,
305        size_t size,
306        void *cookie,
307        MediaPlayerBase::AudioSink::cb_event_t event) {
308    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
309
310    switch (event) {
311        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
312        {
313            return me->fillAudioBuffer(buffer, size);
314            break;
315        }
316
317        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
318        {
319            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
320            break;
321        }
322
323        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
324        {
325            me->notifyAudioOffloadTearDown();
326            break;
327        }
328    }
329
330    return 0;
331}
332
// Offload-mode pull path (called from AudioSinkCallback on the sink's
// thread): copies up to |size| bytes of queued audio into |buffer| under
// mLock. Returns the number of bytes copied; 0 while paused or when not
// offloading. On EOS a kWhatStopAudioSink message is posted so the sink is
// stopped on the renderer thread rather than from this callback.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Only the first *unconsumed* entry (offset 0) updates the anchors:
        // partially-drained entries already contributed their timestamp.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }

            // Anchor media time to what has actually played out so far,
            // not to this entry's timestamp (data is still queued ahead).
            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeMediaUs =
                mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
            mAnchorTimeRealUs = nowUs;

            notifyPosition();
        }

        // Copy as much of this entry as fits in the remaining space.
        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully consumed: return the buffer to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        // Stop the sink from the renderer thread, not from this callback.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}
397
// Non-offload push path: writes as much queued audio into the sink as it can
// currently accept. Returns true if entries remain queued (the caller then
// reschedules another drain), false on EOS or when the sink position cannot
// be read.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    // Free space in the sink = its total capacity minus frames written but
    // not yet played out.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            // Delay the EOS notification until the already-written audio
            // has actually played out, if the sink needs that.
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        // A fresh entry (offset 0) carries the timestamp used to update the
        // anchors; partially-written entries were already accounted for.
        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }
            mAnchorTimeMediaUs = mediaTimeUs;

            // This entry will be audible only after everything already
            // pending in the sink has played out.
            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);

            notifyPosition();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully written: return the buffer to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    // true => caller should schedule another drain.
    return !mAudioQueue.empty();
}
498
499int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
500    int64_t writtenAudioDurationUs =
501        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
502    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
503}
504
// Schedules rendering of the head-of-queue video entry: computes its target
// real time from the anchors (or takes the timestamp verbatim in
// FLAG_REAL_TIME mode), lets the VideoFrameScheduler snap it to a vsync, and
// posts kWhatDrainVideoQueue two vsyncs early so the frame can be queued to
// the display in time.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        // Timestamps are already real (wall-clock) times.
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        // With no audio, video establishes the anchors itself.
        if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) {
            mFirstAnchorTimeMediaUs = mediaTimeUs;
        }
        if (mAnchorTimeMediaUs < 0) {
            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = nowUs;
                notifyPosition();
            }
            // No anchor yet (audio hasn't established one): render now.
            realTimeUs = nowUs;
        } else {
            realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
        }
    }

    // Snap the presentation time onto the display's vsync grid.
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}
566
567void NuPlayer::Renderer::onDrainVideoQueue() {
568    if (mVideoQueue.empty()) {
569        return;
570    }
571
572    QueueEntry *entry = &*mVideoQueue.begin();
573
574    if (entry->mBuffer == NULL) {
575        // EOS
576
577        notifyEOS(false /* audio */, entry->mFinalResult);
578
579        mVideoQueue.erase(mVideoQueue.begin());
580        entry = NULL;
581
582        mVideoLateByUs = 0ll;
583        return;
584    }
585
586    int64_t realTimeUs;
587    if (mFlags & FLAG_REAL_TIME) {
588        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
589    } else {
590        int64_t mediaTimeUs;
591        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
592
593        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
594    }
595
596    bool tooLate = false;
597
598    if (!mPaused) {
599        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
600        tooLate = (mVideoLateByUs > 40000);
601
602        if (tooLate) {
603            ALOGV("video late by %lld us (%.2f secs)",
604                 mVideoLateByUs, mVideoLateByUs / 1E6);
605        } else {
606            ALOGV("rendering video at media time %.2f secs",
607                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
608                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
609        }
610    } else {
611        mVideoLateByUs = 0ll;
612    }
613
614    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
615    entry->mNotifyConsumed->setInt32("render", !tooLate);
616    entry->mNotifyConsumed->post();
617    mVideoQueue.erase(mVideoQueue.begin());
618    entry = NULL;
619
620    mVideoSampleReceived = true;
621
622    if (!mPaused) {
623        if (!mVideoRenderingStarted) {
624            mVideoRenderingStarted = true;
625            notifyVideoRenderingStart();
626        }
627        notifyIfMediaRenderingStarted();
628    }
629}
630
631void NuPlayer::Renderer::notifyVideoRenderingStart() {
632    sp<AMessage> notify = mNotify->dup();
633    notify->setInt32("what", kWhatVideoRenderingStart);
634    notify->post();
635}
636
637void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
638    sp<AMessage> notify = mNotify->dup();
639    notify->setInt32("what", kWhatEOS);
640    notify->setInt32("audio", static_cast<int32_t>(audio));
641    notify->setInt32("finalResult", finalResult);
642    notify->post(delayUs);
643}
644
645void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
646    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
647}
648
// Renderer-thread handler for queueBuffer(): appends the buffer to the
// appropriate queue and schedules a drain. While mSyncQueues is set, the tail
// of this function aligns the two queues' start times by dropping leading
// audio that begins more than 0.1s before the first video frame.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
        // Lazily create the vsync-based frame scheduler on first video.
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    // If a flush is in flight for this stream, consume-and-drop instead.
    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        // NOTE(review): the video push and postDrainVideoQueue() run without
        // mLock here, unlike the audio branch — presumably safe because video
        // is only touched on this looper thread; confirm.
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    // Queues are close enough in time: stop syncing and start draining.
    syncQueuesDone_l();
}
724
725void NuPlayer::Renderer::syncQueuesDone_l() {
726    if (!mSyncQueues) {
727        return;
728    }
729
730    mSyncQueues = false;
731
732    if (!mAudioQueue.empty()) {
733        postDrainAudioQueue_l();
734    }
735
736    if (!mVideoQueue.empty()) {
737        postDrainVideoQueue();
738    }
739}
740
741void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
742    int32_t audio;
743    CHECK(msg->findInt32("audio", &audio));
744
745    if (dropBufferWhileFlushing(audio, msg)) {
746        return;
747    }
748
749    int32_t finalResult;
750    CHECK(msg->findInt32("finalResult", &finalResult));
751
752    QueueEntry entry;
753    entry.mOffset = 0;
754    entry.mFinalResult = finalResult;
755
756    if (audio) {
757        Mutex::Autolock autoLock(mLock);
758        if (mAudioQueue.empty() && mSyncQueues) {
759            syncQueuesDone_l();
760        }
761        mAudioQueue.push_back(entry);
762        postDrainAudioQueue_l();
763    } else {
764        if (mVideoQueue.empty() && mSyncQueues) {
765            Mutex::Autolock autoLock(mLock);
766            syncQueuesDone_l();
767        }
768        mVideoQueue.push_back(entry);
769        postDrainVideoQueue();
770    }
771}
772
// Renderer-thread handler for flush(): clears the in-flight-flush flag,
// empties the stream's queue (returning all buffers to the decoder), bumps
// the queue generation so stale drain posts are ignored, and notifies the
// upper layer when done.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            // Invalidate pending kWhatDrainAudioQueue posts and re-arm the
            // rendering-started notification.
            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            // In offload mode the anchor derives from played-out duration,
            // which the sink flush below resets.
            if (offloadingAudio()) {
                mFirstAnchorTimeMediaUs = -1;
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        // Discard the scheduler's learned frame cadence.
        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}
836
837void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
838    while (!queue->empty()) {
839        QueueEntry *entry = &*queue->begin();
840
841        if (entry->mBuffer != NULL) {
842            entry->mNotifyConsumed->post();
843        }
844
845        queue->erase(queue->begin());
846        entry = NULL;
847    }
848}
849
850void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
851    sp<AMessage> notify = mNotify->dup();
852    notify->setInt32("what", kWhatFlushComplete);
853    notify->setInt32("audio", static_cast<int32_t>(audio));
854    notify->post();
855}
856
857bool NuPlayer::Renderer::dropBufferWhileFlushing(
858        bool audio, const sp<AMessage> &msg) {
859    bool flushing = false;
860
861    {
862        Mutex::Autolock autoLock(mFlushLock);
863        if (audio) {
864            flushing = mFlushingAudio;
865        } else {
866            flushing = mFlushingVideo;
867        }
868    }
869
870    if (!flushing) {
871        return false;
872    }
873
874    sp<AMessage> notifyConsumed;
875    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
876        notifyConsumed->post();
877    }
878
879    return true;
880}
881
882void NuPlayer::Renderer::onAudioSinkChanged() {
883    if (offloadingAudio()) {
884        return;
885    }
886    CHECK(!mDrainAudioQueuePending);
887    mNumFramesWritten = 0;
888    uint32_t written;
889    if (mAudioSink->getFramesWritten(&written) == OK) {
890        mNumFramesWritten = written;
891    }
892}
893
// Switches the renderer out of audio-offload mode. Bumping the audio queue
// generation invalidates any drain posts scheduled under the old mode.
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}
899
// Posts the current (extrapolated) playback position upstream, throttled to
// one update per kMinPositionUpdateDelayUs.
void NuPlayer::Renderer::notifyPosition() {
    // notifyPosition() must be called only after setting mAnchorTimeRealUs
    // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position.
    //CHECK_GE(mAnchorTimeRealUs, 0);
    //CHECK_GE(mAnchorTimeMediaUs, 0);
    //CHECK(!mPaused || !mHasAudio);  // video-only does display in paused mode.

    int64_t nowUs = ALooper::GetNowUs();

    // Rate-limit updates.
    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    // Extrapolate from the anchor: elapsed real time maps 1:1 to media time.
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)"
    //        " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)",
    //        (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs,
    //        (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs);

    // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(),
    // positionUs may be less than the first media time.  This is avoided
    // here to prevent potential retrograde motion of the position bar
    // when starting up after a seek.
    if (positionUs < mFirstAnchorTimeMediaUs) {
        positionUs = mFirstAnchorTimeMediaUs;
    }
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}
935
936void NuPlayer::Renderer::onPause() {
937    if (mPaused) {
938        ALOGW("Renderer::onPause() called while already paused!");
939        return;
940    }
941    {
942        Mutex::Autolock autoLock(mLock);
943        ++mAudioQueueGeneration;
944        ++mVideoQueueGeneration;
945        prepareForMediaRenderingStart();
946        mPaused = true;
947    }
948
949    mDrainAudioQueuePending = false;
950    mDrainVideoQueuePending = false;
951
952    if (mHasAudio) {
953        mAudioSink->pause();
954    }
955
956    ALOGV("now paused audio queue has %d entries, video has %d entries",
957          mAudioQueue.size(), mVideoQueue.size());
958}
959
// Renderer-thread handler for resume(): restarts the audio sink and, under
// mLock, clears the paused flag and reschedules drains for any queued data.
void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    // Start the sink before clearing mPaused so writes resume into a
    // running sink. Note this runs outside mLock.
    if (mHasAudio) {
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
980
// Renderer-thread handler for setVideoFrameRate(): (re)initializes the frame
// scheduler with the nominal content frame rate, creating it if video hasn't
// arrived yet (onQueueBuffer() would otherwise create it with init()).
void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
    if (mVideoScheduler == NULL) {
        mVideoScheduler = new VideoFrameScheduler();
    }
    mVideoScheduler->init(fps);
}
987
// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls are not needed since notifyPosition() doesn't always deliver a message.
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
// Returns how much audio (in microseconds) has actually played out of the
// sink as of |nowUs|, preferring getTimestamp() and falling back to
// getPosition() + half the sink latency. Never returns a negative value.
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGW("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //        numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
    // Played duration = frames played converted to time, plus the time elapsed
    // since the frame count was sampled.
    int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGW("getPlayedOutAudioDurationUs: negative timestamp %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}
1052
// Renderer-thread handler for an offload tear-down event from the sink:
// computes the position playback should resume from (first anchor plus what
// actually played out), stops/flushes the sink, and forwards the event with
// that position upstream so playback can be re-established.
void NuPlayer::Renderer::onAudioOffloadTearDown() {
    int64_t firstAudioTimeUs;
    {
        Mutex::Autolock autoLock(mLock);
        firstAudioTimeUs = mFirstAnchorTimeMediaUs;
    }

    int64_t currentPositionUs =
        firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->post();
}
1071
1072}  // namespace android
1073
1074