NuPlayerRenderer.cpp revision 095248375e29adde961ec2a44989ecb3a6dda6a2
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>

#include <inttypes.h>

namespace android {

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAudioTimeUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

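// Schedules a kWhatDrainAudioQueue message after |delayUs| unless a drain is
// already pending, the queues are still being synced, playback is paused, or
// audio is offloaded (the offload path pulls data through fillAudioBuffer()
// instead).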
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

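// Posts kWhatMediaRenderingStart the first time data is rendered after
// prepareForMediaRenderingStart(), provided no flush or pause has bumped the
// queue generations in the meantime.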
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

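// Static callback handed to the AudioSink; in offload mode the sink pulls
// decoded audio through it via fillAudioBuffer(). |cookie| is the Renderer
// instance.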
// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            // TODO: send this to player.
            break;
        }
    }

    return 0;
}

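// Offload path: copies queued audio straight into the sink's buffer and
// updates the time anchor from the sink's playback position. Returns the
// number of bytes copied.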
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio()) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAudioTimeUs == -1) {
                mFirstAudioTimeUs = mediaTimeUs;
            }
            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            // TODO: figure out how to calculate initial latency.
            // Otherwise, the initial time is not correct till the first sample
            // is played.
            mAnchorTimeMediaUs = mFirstAudioTimeUs
                    + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
            mAnchorTimeRealUs = ALooper::GetNowUs();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (sizeCopied != 0) {
        notifyPosition();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

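// Non-offload path: writes as much queued audio as the sink can accept
// without blocking. Returns true if the audio queue still has data, so the
// caller can schedule another drain.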
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

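// Schedules kWhatDrainVideoQueue so the buffer at the head of the video queue
// is rendered at its target real time, derived from the media-time anchor
// (EOS entries and buffers queued before an anchor exists are drained
// immediately).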
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

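// Renders or drops the buffer at the head of the video queue. Frames running
// more than 40ms behind their target real time are dropped by telling the
// decoder not to render them.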
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %" PRId64 " us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs",
                (mFlags & FLAG_REAL_TIME ? realTimeUs :
                (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

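// Queues a decoded buffer for rendering. While mSyncQueues is set, the heads
// of both queues are compared and audio that leads video by more than 100ms
// is dropped, so playback of the two streams starts roughly aligned.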
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

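// Called with mLock held. Ends the initial queue-sync phase and kicks off the
// drain of whichever queues already have data.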
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

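// An EOS is queued as an entry whose mBuffer is NULL; mFinalResult carries
// the status that will be reported when the entry is drained.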
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
        Mutex::Autolock autoLock(mLock);
        syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);
        }

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;

        prepareForMediaRenderingStart();
        if (offloadingAudio()) {
            mFirstAudioTimeUs = -1;
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

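// Returns true and immediately hands the buffer back to the decoder (via
// "notifyConsumed") if a flush for the given stream is still pending.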
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

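// The audio sink has been reopened; resync mNumFramesWritten with the frame
// count the new sink reports (non-offload mode only).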
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
}

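// Reports the interpolated playback position upstream, throttled to at most
// one update per kMinPositionUpdateDelayUs.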
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %zu entries, video has %zu entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    Mutex::Autolock autoLock(mLock);
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android