NuPlayerRenderer.cpp revision fbe8bef8bcf7aed97f0332908a817b0e6d91b9ba
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>

#include <inttypes.h>

namespace android {

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAudioTimeUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll),
      mVideoSampleReceived(false) {
}

NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

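// Posts a (possibly delayed) kWhatDrainAudioQueue message, unless a drain is
// already pending, the queues are still being synced, playback is paused,
// audio is offloaded, or the audio queue is empty.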
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

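// Static trampoline used as the AudioSink callback: the Renderer instance is
// recovered from 'cookie' and the event is dispatched to fillAudioBuffer(),
// notifyEOS() or notifyAudioOffloadTearDown().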
// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

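// Pull-model audio path: invoked from AudioSinkCallback while offloading.
// Copies as much queued audio data as fits into the sink-provided buffer,
// establishes the time anchors from the first entry, and schedules a stop of
// the sink once an EOS entry is reached.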
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio()) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAudioTimeUs == -1) {
                mFirstAudioTimeUs = mediaTimeUs;
            }

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            // TODO: figure out how to calculate initial latency.
            // Otherwise, the initial time is not correct till the first sample
            // is played.
            mAnchorTimeMediaUs = mFirstAudioTimeUs
                    + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
            mAnchorTimeRealUs = ALooper::GetNowUs();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (sizeCopied != 0) {
        notifyPosition();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

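// Push-model audio path (non-offload): writes as many queued buffers as the
// sink can currently accept, updates the time anchors, and returns true if
// the queue still has data so the caller can schedule another drain.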
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getAudioPendingPlayoutUs() + 1000 * mAudioSink->latency();
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            mAnchorTimeMediaUs = mediaTimeUs;

            mAnchorTimeRealUs = ALooper::GetNowUs()
                    + getAudioPendingPlayoutUs() + 1000 * mAudioSink->latency() / 2;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    notifyPosition();

    return !mAudioQueue.empty();
}

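// Estimates how long, in microseconds, the audio already written to the sink
// will keep playing, based on frames written versus frames played.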
int64_t NuPlayer::Renderer::getAudioPendingPlayoutUs() {
    uint32_t numFramesPlayed;
    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

    uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed;
    return numFramesPendingPlayout * mAudioSink->msecsPerFrame() * 1000;
}

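// Schedules the next kWhatDrainVideoQueue message. The delay is derived from
// the head buffer's timestamp relative to the current anchor; EOS entries and
// the first buffer before an anchor exists are drained immediately.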
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

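// Releases the head of the video queue: posts its notifyConsumed message with
// a "render" flag that is cleared if the frame is more than 40ms late
// relative to its computed real time.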
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    bool tooLate = false;

    if (!mPaused) {
        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %" PRId64 " us (%.2f secs)",
                 mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
        }
    } else {
        mVideoLateByUs = 0ll;
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

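// Handles kWhatQueueBuffer: appends the buffer to the audio or video queue
// and kicks off draining. While mSyncQueues is set, the initial audio and
// video buffers are aligned by dropping leading audio that starts more than
// 0.1 secs before the first video frame.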
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

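// Handles kWhatFlush: clears the pending-flush flag, abandons any in-progress
// queue syncing, discards all queued entries for the flushed stream and bumps
// the corresponding queue generation so stale drain messages are ignored.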
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
        Mutex::Autolock autoLock(mLock);
        syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                mFirstAudioTimeUs = -1;
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

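// Returns true if the given stream is currently being flushed; in that case
// the buffer is immediately returned to its owner via notifyConsumed instead
// of being queued.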
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

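// Reports the current playback position (anchor media time plus elapsed real
// time) to the notify target, throttled to at most one update every
// kMinPositionUpdateDelayUs.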
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    {
        Mutex::Autolock autoLock(mLock);
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %zu entries, video has %zu entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    Mutex::Autolock autoLock(mLock);
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onAudioOffloadTearDown() {
    uint32_t numFramesPlayed;
    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

    int64_t firstAudioTimeUs;
    {
        Mutex::Autolock autoLock(mLock);
        firstAudioTimeUs = mFirstAudioTimeUs;
    }
    int64_t currentPositionUs = firstAudioTimeUs
            + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->post();
}

}  // namespace android