NuPlayerRenderer.cpp revision 06ad1528e6dd4c866c085d3cad9235d2752eb3ed
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "NuPlayerRenderer"
19#include <utils/Log.h>
20
21#include "NuPlayerRenderer.h"
22
23#include <media/stagefright/foundation/ABuffer.h>
24#include <media/stagefright/foundation/ADebug.h>
25#include <media/stagefright/foundation/AMessage.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28
29#include <inttypes.h>
30
31namespace android {
32
// static
// Minimum interval between successive kWhatPosition notifications (100ms);
// enforced in notifyPosition().
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
35
// Renderer paces delivery of decoded buffers to the audio sink and video
// output, maintaining A/V sync against a media-time/real-time anchor pair.
// |flags| may carry FLAG_REAL_TIME and/or FLAG_OFFLOAD_AUDIO.
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAudioTimeUs(-1),
      mAnchorTimeMediaUs(-1),   // -1 == no A/V anchor established yet
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll),
      mVideoSampleReceived(false) {
}
64
65NuPlayer::Renderer::~Renderer() {
66    if (offloadingAudio()) {
67        mAudioSink->stop();
68        mAudioSink->flush();
69        mAudioSink->close();
70    }
71}
72
73void NuPlayer::Renderer::queueBuffer(
74        bool audio,
75        const sp<ABuffer> &buffer,
76        const sp<AMessage> &notifyConsumed) {
77    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
78    msg->setInt32("audio", static_cast<int32_t>(audio));
79    msg->setBuffer("buffer", buffer);
80    msg->setMessage("notifyConsumed", notifyConsumed);
81    msg->post();
82}
83
84void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
85    CHECK_NE(finalResult, (status_t)OK);
86
87    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
88    msg->setInt32("audio", static_cast<int32_t>(audio));
89    msg->setInt32("finalResult", finalResult);
90    msg->post();
91}
92
93void NuPlayer::Renderer::flush(bool audio) {
94    {
95        Mutex::Autolock autoLock(mFlushLock);
96        if (audio) {
97            if (mFlushingAudio) {
98                return;
99            }
100            mFlushingAudio = true;
101        } else {
102            if (mFlushingVideo) {
103                return;
104            }
105            mFlushingVideo = true;
106        }
107    }
108
109    sp<AMessage> msg = new AMessage(kWhatFlush, id());
110    msg->setInt32("audio", static_cast<int32_t>(audio));
111    msg->post();
112}
113
// Invalidates the A/V anchor across a timestamp discontinuity; the anchor
// is re-established from the next rendered sample. Also abandons any
// in-progress queue syncing.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}
122
123void NuPlayer::Renderer::signalAudioSinkChanged() {
124    (new AMessage(kWhatAudioSinkChanged, id()))->post();
125}
126
127void NuPlayer::Renderer::signalDisableOffloadAudio() {
128    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
129}
130
131void NuPlayer::Renderer::pause() {
132    (new AMessage(kWhatPause, id()))->post();
133}
134
135void NuPlayer::Renderer::resume() {
136    (new AMessage(kWhatResume, id()))->post();
137}
138
// Dispatches looper messages to the corresponding on*() handlers. Drain
// messages carry a generation number so messages scheduled before a
// flush/pause (which bumps the generation) are silently ignored.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            // Posted from fillAudioBuffer() once the offloaded stream hit
            // EOS; stop the sink from the looper thread.
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                // Stale message from before a flush/pause; drop it.
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                // More data queued; reschedule once about half of the
                // sink's currently buffered audio has played out.
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                // Stale message from before a flush/pause; drop it.
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Schedule the next frame (no-op if the queue is now empty).
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
250
251void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
252    if (mDrainAudioQueuePending || mSyncQueues || mPaused
253            || offloadingAudio()) {
254        return;
255    }
256
257    if (mAudioQueue.empty()) {
258        return;
259    }
260
261    mDrainAudioQueuePending = true;
262    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
263    msg->setInt32("generation", mAudioQueueGeneration);
264    msg->post(delayUs);
265}
266
// Arms notifyIfMediaRenderingStarted(): snapshots the current queue
// generations so the "rendering started" notification will fire once after
// this point (the snapshots are set to -1 once it has fired).
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}
271
272void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
273    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
274        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
275        mVideoRenderingStartGeneration = -1;
276        mAudioRenderingStartGeneration = -1;
277
278        sp<AMessage> notify = mNotify->dup();
279        notify->setInt32("what", kWhatMediaRenderingStart);
280        notify->post();
281    }
282}
283
284// static
285size_t NuPlayer::Renderer::AudioSinkCallback(
286        MediaPlayerBase::AudioSink * /* audioSink */,
287        void *buffer,
288        size_t size,
289        void *cookie,
290        MediaPlayerBase::AudioSink::cb_event_t event) {
291    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
292
293    switch (event) {
294        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
295        {
296            return me->fillAudioBuffer(buffer, size);
297            break;
298        }
299
300        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
301        {
302            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
303            break;
304        }
305
306        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
307        {
308            me->notifyAudioOffloadTearDown();
309            break;
310        }
311    }
312
313    return 0;
314}
315
// AudioSink pull callback (offload mode only): copies up to |size| bytes of
// queued audio into |buffer| and returns the number of bytes copied. Runs
// on the sink's callback thread, hence mLock guards all queue/anchor state.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        // Not actively offloading; feed nothing.
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            // First untouched buffer this callback: refresh the A/V anchor
            // from how much audio the sink has actually played out.
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAudioTimeUs == -1) {
                mFirstAudioTimeUs = mediaTimeUs;
            }

            // TODO: figure out how to calculate initial latency if
            // getTimestamp is not available. Otherwise, the initial time
            // is not correct till the first sample is played.
            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeMediaUs =
                mFirstAudioTimeUs + getPlayedOutAudioDurationUs(nowUs);
            mAnchorTimeRealUs = nowUs;
        }

        // Copy as much of this entry as fits in the remaining space.
        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully consumed: hand the buffer back to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (sizeCopied != 0) {
        notifyPosition();
    }

    if (hasEOS) {
        // Stop the sink from the looper thread, not this callback thread.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}
385
// Writes as much queued audio into the sink as it can currently accept
// (non-offload mode). Returns true if the audio queue still has entries,
// i.e. the caller should schedule another drain.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        // Sink can't report progress; don't reschedule.
        return false;
    }

    // Free space in the sink = total frame count minus frames written but
    // not yet played.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            // Delay the EOS notification until the sink has played out
            // everything written, when it needs trailing padding.
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            // Start of a new buffer: re-anchor A/V sync. The real-time
            // anchor is when this data will actually be heard: now plus
            // whatever is still pending inside the sink.
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            mAnchorTimeMediaUs = mediaTimeUs;

            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Buffer fully written: return it to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    notifyPosition();

    return !mAudioQueue.empty();
}
483
484int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
485    int64_t writtenAudioDurationUs =
486        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
487    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
488}
489
// Schedules a kWhatDrainVideoQueue message for the head of the video
// queue, delayed so it fires near the frame's target render time.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        // Already scheduled, still syncing queues, or paused after the
        // first sample (one frame is still released while paused).
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        // Timestamps are already wall-clock times.
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            // No anchor yet: render immediately. Video-only playback
            // establishes the anchor here; otherwise audio sets it.
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            // Map media time to real time through the anchor.
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}
540
541void NuPlayer::Renderer::onDrainVideoQueue() {
542    if (mVideoQueue.empty()) {
543        return;
544    }
545
546    QueueEntry *entry = &*mVideoQueue.begin();
547
548    if (entry->mBuffer == NULL) {
549        // EOS
550
551        notifyEOS(false /* audio */, entry->mFinalResult);
552
553        mVideoQueue.erase(mVideoQueue.begin());
554        entry = NULL;
555
556        mVideoLateByUs = 0ll;
557
558        notifyPosition();
559        return;
560    }
561
562    int64_t realTimeUs;
563    if (mFlags & FLAG_REAL_TIME) {
564        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
565    } else {
566        int64_t mediaTimeUs;
567        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
568
569        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
570    }
571
572    bool tooLate = false;
573
574    if (!mPaused) {
575        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
576        tooLate = (mVideoLateByUs > 40000);
577
578        if (tooLate) {
579            ALOGV("video late by %lld us (%.2f secs)",
580                 mVideoLateByUs, mVideoLateByUs / 1E6);
581        } else {
582            ALOGV("rendering video at media time %.2f secs",
583                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
584                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
585        }
586    } else {
587        mVideoLateByUs = 0ll;
588    }
589
590    entry->mNotifyConsumed->setInt32("render", !tooLate);
591    entry->mNotifyConsumed->post();
592    mVideoQueue.erase(mVideoQueue.begin());
593    entry = NULL;
594
595    mVideoSampleReceived = true;
596
597    if (!mPaused) {
598        if (!mVideoRenderingStarted) {
599            mVideoRenderingStarted = true;
600            notifyVideoRenderingStart();
601        }
602        notifyIfMediaRenderingStarted();
603    }
604
605    notifyPosition();
606}
607
608void NuPlayer::Renderer::notifyVideoRenderingStart() {
609    sp<AMessage> notify = mNotify->dup();
610    notify->setInt32("what", kWhatVideoRenderingStart);
611    notify->post();
612}
613
614void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
615    sp<AMessage> notify = mNotify->dup();
616    notify->setInt32("what", kWhatEOS);
617    notify->setInt32("audio", static_cast<int32_t>(audio));
618    notify->setInt32("finalResult", finalResult);
619    notify->post(delayUs);
620}
621
622void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
623    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
624}
625
// Queues a decoded buffer for rendering. While mSyncQueues is set, also
// trims leading audio so the first audio and video timestamps line up
// within 0.1s before draining begins.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        // Stream is flushing: buffer was returned to the decoder instead.
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    // NOTE(review): the audio queue is guarded by mLock (it is also touched
    // from the sink callback thread in fillAudioBuffer()); the video queue
    // is pushed without the lock — presumably video state is confined to
    // this looper thread. Confirm before changing threading.
    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        // Not syncing, or can't compare until both queues have an entry.
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}
697
// Ends the initial A/V queue-syncing phase (caller must hold mLock) and
// kicks off the drains that were suppressed while syncing. No-op when
// syncing is not active.
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
713
// Appends an EOS marker (an entry with NULL mBuffer) to the audio or video
// queue, unless that stream is currently flushing.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    // An EOS entry carries no buffer; mFinalResult holds the reason.
    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            // EOS on an empty queue ends any pending A/V queue syncing.
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            // NOTE(review): unlike the audio branch, mLock here spans only
            // syncQueuesDone_l(), and mSyncQueues is read before the lock is
            // taken — confirm this asymmetry is intentional (video state
            // appears to be touched only on the looper thread).
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}
745
// Handles kWhatFlush: empties the requested queue, bumps its generation to
// invalidate scheduled drains, and acknowledges with kWhatFlushComplete.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        // Clear the flag first so buffers queued after this point are kept.
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                // Next fillAudioBuffer() will re-learn the first timestamp.
                mFirstAudioTimeUs = -1;
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            // Also drop the data already buffered inside the sink.
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}
805
806void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
807    while (!queue->empty()) {
808        QueueEntry *entry = &*queue->begin();
809
810        if (entry->mBuffer != NULL) {
811            entry->mNotifyConsumed->post();
812        }
813
814        queue->erase(queue->begin());
815        entry = NULL;
816    }
817}
818
819void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
820    sp<AMessage> notify = mNotify->dup();
821    notify->setInt32("what", kWhatFlushComplete);
822    notify->setInt32("audio", static_cast<int32_t>(audio));
823    notify->post();
824}
825
826bool NuPlayer::Renderer::dropBufferWhileFlushing(
827        bool audio, const sp<AMessage> &msg) {
828    bool flushing = false;
829
830    {
831        Mutex::Autolock autoLock(mFlushLock);
832        if (audio) {
833            flushing = mFlushingAudio;
834        } else {
835            flushing = mFlushingVideo;
836        }
837    }
838
839    if (!flushing) {
840        return false;
841    }
842
843    sp<AMessage> notifyConsumed;
844    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
845        notifyConsumed->post();
846    }
847
848    return true;
849}
850
851void NuPlayer::Renderer::onAudioSinkChanged() {
852    if (offloadingAudio()) {
853        return;
854    }
855    CHECK(!mDrainAudioQueuePending);
856    mNumFramesWritten = 0;
857    uint32_t written;
858    if (mAudioSink->getFramesWritten(&written) == OK) {
859        mNumFramesWritten = written;
860    }
861}
862
863void NuPlayer::Renderer::onDisableOffloadAudio() {
864    Mutex::Autolock autoLock(mLock);
865    mFlags &= ~FLAG_OFFLOAD_AUDIO;
866    ++mAudioQueueGeneration;
867}
868
869void NuPlayer::Renderer::notifyPosition() {
870    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
871        return;
872    }
873
874    int64_t nowUs = ALooper::GetNowUs();
875
876    if (mLastPositionUpdateUs >= 0
877            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
878        return;
879    }
880    mLastPositionUpdateUs = nowUs;
881
882    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
883
884    sp<AMessage> notify = mNotify->dup();
885    notify->setInt32("what", kWhatPosition);
886    notify->setInt64("positionUs", positionUs);
887    notify->setInt64("videoLateByUs", mVideoLateByUs);
888    notify->post();
889}
890
891void NuPlayer::Renderer::onPause() {
892    if (mPaused) {
893        ALOGW("Renderer::onPause() called while already paused!");
894        return;
895    }
896    {
897        Mutex::Autolock autoLock(mLock);
898        ++mAudioQueueGeneration;
899        ++mVideoQueueGeneration;
900        prepareForMediaRenderingStart();
901        mPaused = true;
902    }
903
904    mDrainAudioQueuePending = false;
905    mDrainVideoQueuePending = false;
906
907    if (mHasAudio) {
908        mAudioSink->pause();
909    }
910
911    ALOGV("now paused audio queue has %d entries, video has %d entries",
912          mAudioQueue.size(), mVideoQueue.size());
913}
914
// Resumes playback after onPause(): restarts the audio sink and
// reschedules drains for whatever queued up while paused.
void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    // Restart the sink before resuming the drain loops.
    if (mHasAudio) {
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
935
// Returns how much audio (in us) the sink has actually played out, based on
// getTimestamp() when available, else getPosition() plus half the sink
// latency as a rough estimate of data in flight.
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    // FIXME: getTimestamp sometimes returns negative frame count.
    // Since we do not handle the rollover at this point (which can
    // happen every 14 hours), simply treat the timestamp as signed.
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
    } else {
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
    }
    // The (int32_t) cast deliberately reinterprets the frame count as
    // signed (see FIXME above); extrapolate from the capture time of the
    // position sample to |nowUs|.
    return (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
            + nowUs - numFramesPlayedAt;
}
957
958void NuPlayer::Renderer::onAudioOffloadTearDown() {
959    int64_t firstAudioTimeUs;
960    {
961        Mutex::Autolock autoLock(mLock);
962        firstAudioTimeUs = mFirstAudioTimeUs;
963    }
964
965    int64_t currentPositionUs =
966        firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());
967
968    mAudioSink->stop();
969    mAudioSink->flush();
970
971    sp<AMessage> notify = mNotify->dup();
972    notify->setInt32("what", kWhatAudioOffloadTearDown);
973    notify->setInt64("positionUs", currentPositionUs);
974    notify->post();
975}
976
977}  // namespace android
978
979