NuPlayerRenderer.cpp revision 3a2956d148d81194e297408179e84a47a309ef48
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>

#include <inttypes.h>

namespace android {

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAudioTimeUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

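// Schedules a kWhatDrainAudioQueue message tagged with the current audio
// queue generation, so messages posted before a flush are ignored afterwards.
// Skipped while the queues are being synced, while paused, or when audio is
// offloaded -- in offload mode the sink pulls data via fillAudioBuffer()
// instead of being driven by drain messages.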
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

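// prepareForMediaRenderingStart() snapshots the current queue generations;
// notifyIfMediaRenderingStarted() posts kWhatMediaRenderingStart the first
// time data is rendered while both snapshots still match the current
// generations (i.e. neither stream was flushed since prepare), then resets
// the snapshots to -1 so the notification fires only once per prepare.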
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

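// Offload path: runs on the AudioSink's callback thread in response to
// CB_EVENT_FILL_BUFFER. Copies as much queued audio as fits into the sink's
// buffer, re-derives the media/real-time anchor from the sink's playback
// position, posts notifyConsumed for fully consumed entries and, on EOS,
// asks the looper thread to stop the sink.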
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio()) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAudioTimeUs == -1) {
                mFirstAudioTimeUs = mediaTimeUs;
            }
            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            // TODO: figure out how to calculate initial latency.
            // Otherwise, the initial time is not correct till the first sample
            // is played.
            mAnchorTimeMediaUs = mFirstAudioTimeUs
                    + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
            mAnchorTimeRealUs = ALooper::GetNowUs();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (sizeCopied != 0) {
        notifyPosition();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

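// Non-offload path: writes queued PCM into the AudioSink, limited by the
// free space the sink reports (frameCount minus frames written but not yet
// played), and updates the anchor time from the sink's position and latency.
// Returns true while the audio queue still has data, so the message handler
// schedules another drain after roughly half of the buffered duration.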
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

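// Computes when the frame at the head of the video queue is due and posts a
// generation-tagged kWhatDrainVideoQueue with that delay. For FLAG_REAL_TIME
// streams the timestamp is already wall-clock time; otherwise the delay is
// derived from the media/real-time anchor, which the audio path maintains
// (or which is seeded from the first video frame when there is no audio).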
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %" PRId64 " us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs",
                (mFlags & FLAG_REAL_TIME ? realTimeUs :
                (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

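// Queues a decoded buffer on the audio or video queue and kicks the matching
// drain. While mSyncQueues is set, the first timestamps of both queues are
// compared and audio that starts more than 0.1 sec before the first video
// frame is dropped, so playback begins with the streams roughly aligned.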
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
        Mutex::Autolock autoLock(mLock);
        syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);
        }

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;

        prepareForMediaRenderingStart();
        if (offloadingAudio()) {
            mFirstAudioTimeUs = -1;
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

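// Returns true if the corresponding stream is currently being flushed. In
// that case the buffer is not queued; its notifyConsumed message is posted
// right away so the decoder gets its buffer back.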
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

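// Extrapolates the current playback position from the real-time anchor and
// posts kWhatPosition together with how late video currently is. Updates are
// throttled to at most one every kMinPositionUpdateDelayUs (100 ms).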
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %zu entries, video has %zu entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    Mutex::Autolock autoLock(mLock);
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

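// Offload tear-down (e.g. after the sink reports CB_EVENT_TEAR_DOWN):
// estimates the current position from the number of frames the sink has
// played, stops and flushes the sink, and forwards kWhatAudioOffloadTearDown
// with that position so the player can recover (typically by re-opening the
// audio path and resuming from there).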
void NuPlayer::Renderer::onAudioOffloadTearDown() {
    uint32_t numFramesPlayed;
    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

    int64_t currentPositionUs = mFirstAudioTimeUs
            + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->post();
}

}  // namespace android