NuPlayerRenderer.cpp revision dc43dfa1294470a4413c37e863ef3b621da8681f
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "NuPlayerRenderer"
19#include <utils/Log.h>
20
21#include "NuPlayerRenderer.h"
22
23#include <media/stagefright/foundation/ABuffer.h>
24#include <media/stagefright/foundation/ADebug.h>
25#include <media/stagefright/foundation/AMessage.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28
29#include <VideoFrameScheduler.h>
30
31#include <inttypes.h>
32
33namespace android {
34
// static
// Minimum interval between position notifications sent to the upper layer;
// notifyPosition() drops updates that arrive sooner than this (100ms).
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
37
// Constructs the renderer around an audio sink and a notification target.
// |notify| receives kWhatEOS / kWhatFlushComplete / kWhatPosition /
// kWhatVideoRenderingStart / kWhatMediaRenderingStart / kWhatAudioOffloadTearDown
// messages. |flags| may include FLAG_REAL_TIME and FLAG_OFFLOAD_AUDIO.
// All anchor times start at -1 ("no anchor established yet").
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}
66
NuPlayer::Renderer::~Renderer() {
    // In offload mode the renderer drives the sink directly via the fill
    // callback, so it is responsible for tearing the sink down here:
    // stop playback, discard residual data, and release the offloaded track.
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}
74
75void NuPlayer::Renderer::queueBuffer(
76        bool audio,
77        const sp<ABuffer> &buffer,
78        const sp<AMessage> &notifyConsumed) {
79    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
80    msg->setInt32("audio", static_cast<int32_t>(audio));
81    msg->setBuffer("buffer", buffer);
82    msg->setMessage("notifyConsumed", notifyConsumed);
83    msg->post();
84}
85
86void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
87    CHECK_NE(finalResult, (status_t)OK);
88
89    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
90    msg->setInt32("audio", static_cast<int32_t>(audio));
91    msg->setInt32("finalResult", finalResult);
92    msg->post();
93}
94
95void NuPlayer::Renderer::flush(bool audio) {
96    {
97        Mutex::Autolock autoLock(mFlushLock);
98        if (audio) {
99            if (mFlushingAudio) {
100                return;
101            }
102            mFlushingAudio = true;
103        } else {
104            if (mFlushingVideo) {
105                return;
106            }
107            mFlushingVideo = true;
108        }
109    }
110
111    sp<AMessage> msg = new AMessage(kWhatFlush, id());
112    msg->setInt32("audio", static_cast<int32_t>(audio));
113    msg->post();
114}
115
// Resets the A/V clock after a time discontinuity (e.g. seek): all anchors
// are invalidated so the next queued buffer re-establishes the mapping from
// media time to real time. Also abandons any in-progress queue syncing.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mFirstAnchorTimeMediaUs = -1;
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}
125
126void NuPlayer::Renderer::signalAudioSinkChanged() {
127    (new AMessage(kWhatAudioSinkChanged, id()))->post();
128}
129
130void NuPlayer::Renderer::signalDisableOffloadAudio() {
131    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
132}
133
134void NuPlayer::Renderer::pause() {
135    (new AMessage(kWhatPause, id()))->post();
136}
137
138void NuPlayer::Renderer::resume() {
139    (new AMessage(kWhatResume, id()))->post();
140}
141
// Looper-thread dispatch for all renderer messages.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            // Posted by fillAudioBuffer() on EOS so the sink is stopped from
            // the looper thread rather than from the sink's own callback.
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            // Stale messages from before a flush/pause carry an old
            // generation and are ignored.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            // onDrainAudioQueue() returns true when there is still queued
            // audio left to write; reschedule the next drain roughly halfway
            // through the sink's remaining buffered playout time.
            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            // Render (or drop) the head-of-queue frame, then schedule the
            // next one.
            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
253
254void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
255    if (mDrainAudioQueuePending || mSyncQueues || mPaused
256            || offloadingAudio()) {
257        return;
258    }
259
260    if (mAudioQueue.empty()) {
261        return;
262    }
263
264    mDrainAudioQueuePending = true;
265    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
266    msg->setInt32("generation", mAudioQueueGeneration);
267    msg->post(delayUs);
268}
269
270void NuPlayer::Renderer::prepareForMediaRenderingStart() {
271    mAudioRenderingStartGeneration = mAudioQueueGeneration;
272    mVideoRenderingStartGeneration = mVideoQueueGeneration;
273}
274
275void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
276    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
277        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
278        mVideoRenderingStartGeneration = -1;
279        mAudioRenderingStartGeneration = -1;
280
281        sp<AMessage> notify = mNotify->dup();
282        notify->setInt32("what", kWhatMediaRenderingStart);
283        notify->post();
284    }
285}
286
287// static
288size_t NuPlayer::Renderer::AudioSinkCallback(
289        MediaPlayerBase::AudioSink * /* audioSink */,
290        void *buffer,
291        size_t size,
292        void *cookie,
293        MediaPlayerBase::AudioSink::cb_event_t event) {
294    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
295
296    switch (event) {
297        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
298        {
299            return me->fillAudioBuffer(buffer, size);
300            break;
301        }
302
303        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
304        {
305            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
306            break;
307        }
308
309        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
310        {
311            me->notifyAudioOffloadTearDown();
312            break;
313        }
314    }
315
316    return 0;
317}
318
// Offload-mode data pull: copies up to |size| bytes from the audio queue
// into |buffer| and returns the number of bytes copied. Called on the
// sink's callback thread (via AudioSinkCallback), hence the mLock guard
// around all shared renderer state.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    // If offload was disabled meanwhile, or we're paused, feed nothing.
    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Re-anchor the clock once per fill, at the first fresh entry:
        // current media position = first anchor + audio actually played out.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }

            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeMediaUs =
                mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
            mAnchorTimeRealUs = nowUs;

            notifyPosition();
        }

        // Copy as much of this entry as still fits in the caller's buffer.
        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully consumed: hand the buffer back to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        // Stop the sink from the looper thread, not from this callback.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}
383
// Non-offload audio drain: writes as much queued audio into the sink as its
// buffer currently has room for. Returns true when queued audio remains
// (caller reschedules another drain), false on EOS, on a sink position
// query failure, or when the queue is empty.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    // Free space in the sink = its total frame capacity minus the frames
    // written but not yet played out.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            // Delay the EOS notification until the sink has played out the
            // frames already written, if it needs trailing padding.
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        // A fresh entry (offset 0) re-anchors the clock: real time of this
        // sample = now + playout duration of everything already written.
        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }
            mAnchorTimeMediaUs = mediaTimeUs;

            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);

            notifyPosition();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    return !mAudioQueue.empty();
}
484
485int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
486    int64_t writtenAudioDurationUs =
487        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
488    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
489}
490
// Schedules a kWhatDrainVideoQueue message for the head-of-queue frame,
// timed two vsyncs before the frame's target display time so the frame can
// be handed off to the display pipeline in time.
void NuPlayer::Renderer::postDrainVideoQueue() {
    // While paused we still allow one drain through until the first sample
    // has been received (so a seek-while-paused can show a frame).
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        // Real-time mode: the buffer timestamp already is a real time.
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        // Video establishes the anchors only when there's no audio stream;
        // otherwise audio owns the clock.
        if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) {
            mFirstAnchorTimeMediaUs = mediaTimeUs;
        }
        if (mAnchorTimeMediaUs < 0) {
            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = nowUs;
                notifyPosition();
            }
            realTimeUs = nowUs;
        } else {
            realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
        }
    }

    // Snap the target time to the display's vsync grid (scheduler works in ns).
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}
552
553void NuPlayer::Renderer::onDrainVideoQueue() {
554    if (mVideoQueue.empty()) {
555        return;
556    }
557
558    QueueEntry *entry = &*mVideoQueue.begin();
559
560    if (entry->mBuffer == NULL) {
561        // EOS
562
563        notifyEOS(false /* audio */, entry->mFinalResult);
564
565        mVideoQueue.erase(mVideoQueue.begin());
566        entry = NULL;
567
568        mVideoLateByUs = 0ll;
569        return;
570    }
571
572    int64_t realTimeUs;
573    if (mFlags & FLAG_REAL_TIME) {
574        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
575    } else {
576        int64_t mediaTimeUs;
577        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
578
579        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
580    }
581
582    bool tooLate = false;
583
584    if (!mPaused) {
585        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
586        tooLate = (mVideoLateByUs > 40000);
587
588        if (tooLate) {
589            ALOGV("video late by %lld us (%.2f secs)",
590                 mVideoLateByUs, mVideoLateByUs / 1E6);
591        } else {
592            ALOGV("rendering video at media time %.2f secs",
593                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
594                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
595        }
596    } else {
597        mVideoLateByUs = 0ll;
598    }
599
600    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
601    entry->mNotifyConsumed->setInt32("render", !tooLate);
602    entry->mNotifyConsumed->post();
603    mVideoQueue.erase(mVideoQueue.begin());
604    entry = NULL;
605
606    mVideoSampleReceived = true;
607
608    if (!mPaused) {
609        if (!mVideoRenderingStarted) {
610            mVideoRenderingStarted = true;
611            notifyVideoRenderingStart();
612        }
613        notifyIfMediaRenderingStarted();
614    }
615}
616
617void NuPlayer::Renderer::notifyVideoRenderingStart() {
618    sp<AMessage> notify = mNotify->dup();
619    notify->setInt32("what", kWhatVideoRenderingStart);
620    notify->post();
621}
622
623void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
624    sp<AMessage> notify = mNotify->dup();
625    notify->setInt32("what", kWhatEOS);
626    notify->setInt32("audio", static_cast<int32_t>(audio));
627    notify->setInt32("finalResult", finalResult);
628    notify->post(delayUs);
629}
630
631void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
632    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
633}
634
// Looper-thread handler for queueBuffer(): enqueues the buffer on the
// appropriate queue and, while mSyncQueues is set, aligns the initial
// audio/video timestamps by dropping leading audio that starts more than
// 100ms before the first video frame.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
        // Lazily create the vsync-based frame scheduler on first video buffer.
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    // While a flush for this stream is pending, the buffer is returned to
    // the decoder unrendered.
    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    // Audio queue access needs mLock (shared with the offload callback
    // thread); the video queue is only touched on this looper thread.
    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}
710
711void NuPlayer::Renderer::syncQueuesDone_l() {
712    if (!mSyncQueues) {
713        return;
714    }
715
716    mSyncQueues = false;
717
718    if (!mAudioQueue.empty()) {
719        postDrainAudioQueue_l();
720    }
721
722    if (!mVideoQueue.empty()) {
723        postDrainVideoQueue();
724    }
725}
726
727void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
728    int32_t audio;
729    CHECK(msg->findInt32("audio", &audio));
730
731    if (dropBufferWhileFlushing(audio, msg)) {
732        return;
733    }
734
735    int32_t finalResult;
736    CHECK(msg->findInt32("finalResult", &finalResult));
737
738    QueueEntry entry;
739    entry.mOffset = 0;
740    entry.mFinalResult = finalResult;
741
742    if (audio) {
743        Mutex::Autolock autoLock(mLock);
744        if (mAudioQueue.empty() && mSyncQueues) {
745            syncQueuesDone_l();
746        }
747        mAudioQueue.push_back(entry);
748        postDrainAudioQueue_l();
749    } else {
750        if (mVideoQueue.empty() && mSyncQueues) {
751            Mutex::Autolock autoLock(mLock);
752            syncQueuesDone_l();
753        }
754        mVideoQueue.push_back(entry);
755        postDrainVideoQueue();
756    }
757}
758
// Looper-thread handler for flush(): clears the flush-pending flag, empties
// the stream's queue (returning all buffers to the decoder), bumps the queue
// generation so stale drain messages are ignored, and notifies completion.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            // Invalidate pending drain messages and re-arm the
            // rendering-started notification.
            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            // In offload mode the clock restarts from scratch after a flush.
            if (offloadingAudio()) {
                mFirstAnchorTimeMediaUs = -1;
            }
        }

        mDrainAudioQueuePending = false;

        // Flush the sink itself; pause/start around it so the offloaded
        // track keeps running afterwards.
        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}
818
819void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
820    while (!queue->empty()) {
821        QueueEntry *entry = &*queue->begin();
822
823        if (entry->mBuffer != NULL) {
824            entry->mNotifyConsumed->post();
825        }
826
827        queue->erase(queue->begin());
828        entry = NULL;
829    }
830}
831
832void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
833    sp<AMessage> notify = mNotify->dup();
834    notify->setInt32("what", kWhatFlushComplete);
835    notify->setInt32("audio", static_cast<int32_t>(audio));
836    notify->post();
837}
838
839bool NuPlayer::Renderer::dropBufferWhileFlushing(
840        bool audio, const sp<AMessage> &msg) {
841    bool flushing = false;
842
843    {
844        Mutex::Autolock autoLock(mFlushLock);
845        if (audio) {
846            flushing = mFlushingAudio;
847        } else {
848            flushing = mFlushingVideo;
849        }
850    }
851
852    if (!flushing) {
853        return false;
854    }
855
856    sp<AMessage> notifyConsumed;
857    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
858        notifyConsumed->post();
859    }
860
861    return true;
862}
863
864void NuPlayer::Renderer::onAudioSinkChanged() {
865    if (offloadingAudio()) {
866        return;
867    }
868    CHECK(!mDrainAudioQueuePending);
869    mNumFramesWritten = 0;
870    uint32_t written;
871    if (mAudioSink->getFramesWritten(&written) == OK) {
872        mNumFramesWritten = written;
873    }
874}
875
// Drops out of audio-offload mode: clears the flag (under mLock, since the
// offload fill callback checks it on another thread) and bumps the audio
// queue generation so any stale drain messages are discarded.
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}
881
// Extrapolates the current playback position from the anchors and posts a
// kWhatPosition notification, rate-limited to kMinPositionUpdateDelayUs.
void NuPlayer::Renderer::notifyPosition() {
    // notifyPosition() must be called only after setting mAnchorTimeRealUs
    // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position.
    //CHECK_GE(mAnchorTimeRealUs, 0);
    //CHECK_GE(mAnchorTimeMediaUs, 0);
    //CHECK(!mPaused || !mHasAudio);  // video-only does display in paused mode.

    int64_t nowUs = ALooper::GetNowUs();

    // Throttle: at most one update per kMinPositionUpdateDelayUs.
    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)"
    //        " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)",
    //        (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs,
    //        (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs);

    // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(),
    // positionUs may be less than the first media time.  This is avoided
    // here to prevent potential retrograde motion of the position bar
    // when starting up after a seek.
    if (positionUs < mFirstAnchorTimeMediaUs) {
        positionUs = mFirstAnchorTimeMediaUs;
    }
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}
917
918void NuPlayer::Renderer::onPause() {
919    if (mPaused) {
920        ALOGW("Renderer::onPause() called while already paused!");
921        return;
922    }
923    {
924        Mutex::Autolock autoLock(mLock);
925        ++mAudioQueueGeneration;
926        ++mVideoQueueGeneration;
927        prepareForMediaRenderingStart();
928        mPaused = true;
929    }
930
931    mDrainAudioQueuePending = false;
932    mDrainVideoQueuePending = false;
933
934    if (mHasAudio) {
935        mAudioSink->pause();
936    }
937
938    ALOGV("now paused audio queue has %d entries, video has %d entries",
939          mAudioQueue.size(), mVideoQueue.size());
940}
941
// Looper-thread handler for resume(): restarts the audio sink BEFORE
// clearing mPaused (so audio can flow as soon as drains are re-posted),
// then re-arms a drain for each queue that still has data.
void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
962
// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls are not needed since notifyPosition() doesn't always deliver a message.
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
// Returns the duration of audio the sink has actually played out, in
// microseconds, extrapolated to |nowUs|. Clamped to >= 0 (transitory
// conditions right after start/seek can otherwise yield small negatives).
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGW("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //        numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
    int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGW("getPlayedOutAudioDurationUs: negative timestamp %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}
1027
1028void NuPlayer::Renderer::onAudioOffloadTearDown() {
1029    int64_t firstAudioTimeUs;
1030    {
1031        Mutex::Autolock autoLock(mLock);
1032        firstAudioTimeUs = mFirstAnchorTimeMediaUs;
1033    }
1034
1035    int64_t currentPositionUs =
1036        firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());
1037
1038    mAudioSink->stop();
1039    mAudioSink->flush();
1040
1041    sp<AMessage> notify = mNotify->dup();
1042    notify->setInt32("what", kWhatAudioOffloadTearDown);
1043    notify->setInt64("positionUs", currentPositionUs);
1044    notify->post();
1045}
1046
1047}  // namespace android
1048
1049