NuPlayerRenderer.cpp revision 3e5efb37308aa1f54c2a72cd8a7a73d2d7921a90
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>

#include <inttypes.h>

namespace android {

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAudioTimeUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

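// Schedules a kWhatDrainAudioQueue message after |delayUs|. Not used in offload
// mode, where the AudioSink pulls data itself through fillAudioBuffer().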
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

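// Rendering-start detection: latch the current queue generations here. Once a
// buffer is drained while the latched generations still match the live ones,
// notifyIfMediaRenderingStarted() posts kWhatMediaRenderingStart; a flush or
// pause bumps the generations and re-latches, re-arming the notification.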
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio()) {
        return 0;
    }

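    // Offload path: the AudioSink pulls data through AudioSinkCallback(). Copy
    // as much queued audio as fits into the sink-supplied buffer, updating the
    // media/real-time anchor from the sink's playback position along the way.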
    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAudioTimeUs == -1) {
                mFirstAudioTimeUs = mediaTimeUs;
            }

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            // TODO: figure out how to calculate initial latency.
            // Otherwise, the initial time is not correct till the first sample
            // is played.
            mAnchorTimeMediaUs = mFirstAudioTimeUs
                    + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
            mAnchorTimeRealUs = ALooper::GetNowUs();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (sizeCopied != 0) {
        notifyPosition();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

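// Writes as much queued audio as the (non-offloaded) sink can currently accept.
// Returns true if data remains queued, so the caller schedules another drain.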
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

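            // Estimate when the first sample of this buffer will be heard:
            // frames written but not yet played, plus (roughly) half the sink
            // latency, converted to microseconds and added to "now".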
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
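            // Map the buffer's media time onto the real-time clock using the
            // current anchor pair (mAnchorTimeMediaUs, mAnchorTimeRealUs).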
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

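    // A frame more than 40 ms behind its target real time is considered too
    // late and is flagged not to be rendered ("render" == 0 below).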
    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %" PRId64 " us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs",
                (mFlags & FLAG_REAL_TIME ? realTimeUs :
                (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

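    // A positive diff means the first queued video buffer is timestamped later
    // than the first queued audio buffer, i.e. audio data leads video.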
    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
        Mutex::Autolock autoLock(mLock);
        syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);
        }

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;

        prepareForMediaRenderingStart();
        if (offloadingAudio()) {
            mFirstAudioTimeUs = -1;
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
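    // Re-seed mNumFramesWritten from the (possibly reopened) sink so that
    // pending-playout calculations stay consistent with the sink's counters.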
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

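    // Extrapolate the current media position from the anchor: real time elapsed
    // since the anchor was taken, plus the anchor's media time.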
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %zu entries, video has %zu entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    Mutex::Autolock autoLock(mLock);
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onAudioOffloadTearDown() {
    uint32_t numFramesPlayed;
    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

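    // Translate the number of frames the sink has played into a media time,
    // measured from the first audio buffer handed to the offloaded sink, so the
    // player can resume from approximately the right position after teardown.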
    int64_t currentPositionUs = mFirstAudioTimeUs
            + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->post();
}

}  // namespace android