// NuPlayerRenderer.cpp revision 49966fff32b27f8821ebe280f25688b3c4f5f73f
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "NuPlayerRenderer"
19#include <utils/Log.h>
20
21#include "NuPlayerRenderer.h"
22
23#include <cutils/properties.h>
24
25#include <media/stagefright/foundation/ABuffer.h>
26#include <media/stagefright/foundation/ADebug.h>
27#include <media/stagefright/foundation/AMessage.h>
28#include <media/stagefright/MediaErrors.h>
29#include <media/stagefright/MetaData.h>
30
31#include <VideoFrameScheduler.h>
32
33#include <inttypes.h>
34
35namespace android {
36
37// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
38// is closed to allow the audio DSP to power down.
39static const int64_t kOffloadPauseMaxUs = 60000000ll;
40
41// static
42const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
43
44static bool sFrameAccurateAVsync = false;
45
46static void readProperties() {
47    char value[PROPERTY_VALUE_MAX];
48    if (property_get("persist.sys.media.avsync", value, NULL)) {
49        sFrameAccurateAVsync =
50            !strcmp("1", value) || !strcasecmp("true", value);
51    }
52}
53
// Constructs a renderer that outputs audio through |sink| and reports
// progress/state changes by posting dup()s of |notify|. |flags| carries
// behavior bits such as FLAG_REAL_TIME and FLAG_OFFLOAD_AUDIO (both are
// consulted elsewhere in this file). All anchors start at -1 ("unset").
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mPauseStartedTimeRealUs(-1),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false) {
    readProperties();  // pick up the persist.sys.media.avsync setting
}
86
87NuPlayer::Renderer::~Renderer() {
88    if (offloadingAudio()) {
89        mAudioSink->stop();
90        mAudioSink->flush();
91        mAudioSink->close();
92    }
93}
94
95void NuPlayer::Renderer::queueBuffer(
96        bool audio,
97        const sp<ABuffer> &buffer,
98        const sp<AMessage> &notifyConsumed) {
99    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
100    msg->setInt32("audio", static_cast<int32_t>(audio));
101    msg->setBuffer("buffer", buffer);
102    msg->setMessage("notifyConsumed", notifyConsumed);
103    msg->post();
104}
105
106void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
107    CHECK_NE(finalResult, (status_t)OK);
108
109    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
110    msg->setInt32("audio", static_cast<int32_t>(audio));
111    msg->setInt32("finalResult", finalResult);
112    msg->post();
113}
114
115void NuPlayer::Renderer::flush(bool audio) {
116    {
117        Mutex::Autolock autoLock(mFlushLock);
118        if (audio) {
119            if (mFlushingAudio) {
120                return;
121            }
122            mFlushingAudio = true;
123        } else {
124            if (mFlushingVideo) {
125                return;
126            }
127            mFlushingVideo = true;
128        }
129    }
130
131    sp<AMessage> msg = new AMessage(kWhatFlush, id());
132    msg->setInt32("audio", static_cast<int32_t>(audio));
133    msg->post();
134}
135
136void NuPlayer::Renderer::signalTimeDiscontinuity() {
137    Mutex::Autolock autoLock(mLock);
138    // CHECK(mAudioQueue.empty());
139    // CHECK(mVideoQueue.empty());
140    mFirstAnchorTimeMediaUs = -1;
141    mAnchorTimeMediaUs = -1;
142    mAnchorTimeRealUs = -1;
143    mSyncQueues = false;
144}
145
146void NuPlayer::Renderer::signalAudioSinkChanged() {
147    (new AMessage(kWhatAudioSinkChanged, id()))->post();
148}
149
150void NuPlayer::Renderer::signalDisableOffloadAudio() {
151    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
152}
153
154void NuPlayer::Renderer::pause() {
155    (new AMessage(kWhatPause, id()))->post();
156}
157
158void NuPlayer::Renderer::resume() {
159    (new AMessage(kWhatResume, id()))->post();
160}
161
162void NuPlayer::Renderer::setVideoFrameRate(float fps) {
163    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
164    msg->setFloat("frame-rate", fps);
165    msg->post();
166}
167
// Looper dispatch: handles messages posted by the public API and the
// drain timers. All renderer state is mutated on this thread, except
// for the offload callback path (fillAudioBuffer) which uses mLock.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            // Stale drain request (queue was flushed/paused since it was posted).
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            // onDrainAudioQueue() returns true while audio remains queued,
            // in which case we schedule the next drain.
            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            // Stale drain request (queue was flushed/paused since it was posted).
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Schedule delivery of the next frame, if any.
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            // Ignore timeouts that were cancelled by a resume.
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
299
300void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
301    if (mDrainAudioQueuePending || mSyncQueues || mPaused
302            || offloadingAudio()) {
303        return;
304    }
305
306    if (mAudioQueue.empty()) {
307        return;
308    }
309
310    mDrainAudioQueuePending = true;
311    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
312    msg->setInt32("generation", mAudioQueueGeneration);
313    msg->post(delayUs);
314}
315
316void NuPlayer::Renderer::prepareForMediaRenderingStart() {
317    mAudioRenderingStartGeneration = mAudioQueueGeneration;
318    mVideoRenderingStartGeneration = mVideoQueueGeneration;
319}
320
321void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
322    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
323        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
324        mVideoRenderingStartGeneration = -1;
325        mAudioRenderingStartGeneration = -1;
326
327        sp<AMessage> notify = mNotify->dup();
328        notify->setInt32("what", kWhatMediaRenderingStart);
329        notify->post();
330    }
331}
332
333// static
334size_t NuPlayer::Renderer::AudioSinkCallback(
335        MediaPlayerBase::AudioSink * /* audioSink */,
336        void *buffer,
337        size_t size,
338        void *cookie,
339        MediaPlayerBase::AudioSink::cb_event_t event) {
340    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
341
342    switch (event) {
343        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
344        {
345            return me->fillAudioBuffer(buffer, size);
346            break;
347        }
348
349        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
350        {
351            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
352            break;
353        }
354
355        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
356        {
357            me->notifyAudioOffloadTearDown();
358            break;
359        }
360    }
361
362    return 0;
363}
364
// Offload-mode pull path: copies queued audio into the sink's |buffer|,
// up to |size| bytes, and returns the number of bytes copied. Runs on the
// AudioSink callback thread, hence mLock protects the audio queue here.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    // Nothing to supply if we left offload mode or are paused.
    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // On the first fresh buffer of this callback, refresh the time
        // anchors from the sink's playout position and publish position.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }

            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeMediaUs =
                mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
            mAnchorTimeRealUs = nowUs;

            notifyPosition();
        }

        // Copy as much of this entry as fits in the remaining space.
        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        // Entry fully consumed: release it back to the decoder.
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        // Stop the sink from the looper thread, not this callback thread.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}
429
// Non-offload push path: writes as much queued audio into the AudioSink
// as it can currently accept. Returns true if audio remains queued (the
// caller then schedules the next drain), false otherwise.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    // Free space in the sink = total frames minus frames written but not
    // yet played out.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                // Delay the EOS notification until the written audio has
                // actually played out.
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        // First write of this entry: update the time anchors from its
        // media timestamp and publish position.
        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }
            mAnchorTimeMediaUs = mediaTimeUs;

            // The anchor's real time includes the latency of audio still
            // pending in the sink.
            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);

            notifyPosition();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully written: release it back to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    return !mAudioQueue.empty();
}
530
531int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
532    int64_t writtenAudioDurationUs =
533        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
534    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
535}
536
// Schedules delivery of the next queued video frame: computes its target
// real time from the media-time anchors (or uses the timestamp directly
// in FLAG_REAL_TIME mode), snaps it to a vsync via the scheduler, and
// posts a kWhatDrainVideoQueue message ahead of the render time.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        // Timestamps are already in real time.
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        // Video establishes the anchors only when there is no audio
        // stream to do it.
        if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) {
            mFirstAnchorTimeMediaUs = mediaTimeUs;
        }
        if (mAnchorTimeMediaUs < 0) {
            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = nowUs;
                if (!mPaused || mVideoSampleReceived) {
                    notifyPosition();
                }
            }
            // No anchor yet: render immediately.
            realTimeUs = nowUs;
        } else {
            realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
        }
    }

    // Snap the render time onto the display's vsync grid.
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    // FIXME currently this increases power consumption, so unless frame-accurate
    // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
    if (!sFrameAccurateAVsync) {
        twoVsyncsUs >>= 4;
    }
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}
605
606void NuPlayer::Renderer::onDrainVideoQueue() {
607    if (mVideoQueue.empty()) {
608        return;
609    }
610
611    QueueEntry *entry = &*mVideoQueue.begin();
612
613    if (entry->mBuffer == NULL) {
614        // EOS
615
616        notifyEOS(false /* audio */, entry->mFinalResult);
617
618        mVideoQueue.erase(mVideoQueue.begin());
619        entry = NULL;
620
621        mVideoLateByUs = 0ll;
622        return;
623    }
624
625    int64_t realTimeUs;
626    if (mFlags & FLAG_REAL_TIME) {
627        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
628    } else {
629        int64_t mediaTimeUs;
630        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
631
632        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
633    }
634
635    bool tooLate = false;
636
637    if (!mPaused) {
638        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
639        tooLate = (mVideoLateByUs > 40000);
640
641        if (tooLate) {
642            ALOGV("video late by %lld us (%.2f secs)",
643                 mVideoLateByUs, mVideoLateByUs / 1E6);
644        } else {
645            ALOGV("rendering video at media time %.2f secs",
646                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
647                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
648        }
649    } else {
650        mVideoLateByUs = 0ll;
651        if (!mHasAudio && !mVideoSampleReceived) {
652            mAnchorTimeMediaUs = -1;
653            mAnchorTimeRealUs = -1;
654        }
655    }
656
657    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
658    entry->mNotifyConsumed->setInt32("render", !tooLate);
659    entry->mNotifyConsumed->post();
660    mVideoQueue.erase(mVideoQueue.begin());
661    entry = NULL;
662
663    mVideoSampleReceived = true;
664
665    if (!mPaused) {
666        if (!mVideoRenderingStarted) {
667            mVideoRenderingStarted = true;
668            notifyVideoRenderingStart();
669        }
670        notifyIfMediaRenderingStarted();
671    }
672}
673
674void NuPlayer::Renderer::notifyVideoRenderingStart() {
675    sp<AMessage> notify = mNotify->dup();
676    notify->setInt32("what", kWhatVideoRenderingStart);
677    notify->post();
678}
679
680void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
681    sp<AMessage> notify = mNotify->dup();
682    notify->setInt32("what", kWhatEOS);
683    notify->setInt32("audio", static_cast<int32_t>(audio));
684    notify->setInt32("finalResult", finalResult);
685    notify->post(delayUs);
686}
687
688void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
689    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
690}
691
// Handles kWhatQueueBuffer: appends the buffer to the proper queue,
// kicks the drain machinery and, while mSyncQueues is set, drops leading
// audio until the first audio/video timestamps are within 0.1 s.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
        // Lazily create the vsync-based frame scheduler on first video.
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    // While a flush is pending, consume-and-drop instead of queueing.
    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    // NOTE(review): only the audio queue is pushed under mLock (it is also
    // touched by the offload callback thread in fillAudioBuffer()); the
    // video queue appears to be looper-thread-only — confirm before relying
    // on that.
    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    // Sync only proceeds once both queues have a head entry to compare.
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}
767
768void NuPlayer::Renderer::syncQueuesDone_l() {
769    if (!mSyncQueues) {
770        return;
771    }
772
773    mSyncQueues = false;
774
775    if (!mAudioQueue.empty()) {
776        postDrainAudioQueue_l();
777    }
778
779    if (!mVideoQueue.empty()) {
780        postDrainVideoQueue();
781    }
782}
783
// Handles kWhatQueueEOS: appends a sentinel entry (mBuffer == NULL,
// mFinalResult set) to the proper queue and kicks its drain. If the
// target queue is empty while syncing, syncing ends — the pairing
// buffer will never arrive.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        // NOTE(review): unlike the audio branch, mVideoQueue.empty() and
        // mSyncQueues are read here before mLock is taken; presumably safe
        // because both are only written on the looper thread — confirm.
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}
815
// Handles kWhatFlush: clears the pending-flush flag, drains and releases
// every queued entry for the flushed stream, bumps the queue generation
// so in-flight drain messages become no-ops, and notifies completion.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
         // Video-only playback: a flush invalidates the pause-time snapshot
         // used to shift the anchor on resume.
         if (!mHasAudio) {
             mPauseStartedTimeRealUs = -1;
         }
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            // mLock: the audio queue is shared with the offload callback.
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                mFirstAnchorTimeMediaUs = -1;
            }
        }

        mDrainAudioQueuePending = false;

        // In offload mode the sink holds buffered compressed data; flush it
        // and restart so playback can continue from the new position.
        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}
882
883void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
884    while (!queue->empty()) {
885        QueueEntry *entry = &*queue->begin();
886
887        if (entry->mBuffer != NULL) {
888            entry->mNotifyConsumed->post();
889        }
890
891        queue->erase(queue->begin());
892        entry = NULL;
893    }
894}
895
896void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
897    sp<AMessage> notify = mNotify->dup();
898    notify->setInt32("what", kWhatFlushComplete);
899    notify->setInt32("audio", static_cast<int32_t>(audio));
900    notify->post();
901}
902
903bool NuPlayer::Renderer::dropBufferWhileFlushing(
904        bool audio, const sp<AMessage> &msg) {
905    bool flushing = false;
906
907    {
908        Mutex::Autolock autoLock(mFlushLock);
909        if (audio) {
910            flushing = mFlushingAudio;
911        } else {
912            flushing = mFlushingVideo;
913        }
914    }
915
916    if (!flushing) {
917        return false;
918    }
919
920    sp<AMessage> notifyConsumed;
921    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
922        notifyConsumed->post();
923    }
924
925    return true;
926}
927
928void NuPlayer::Renderer::onAudioSinkChanged() {
929    if (offloadingAudio()) {
930        return;
931    }
932    CHECK(!mDrainAudioQueuePending);
933    mNumFramesWritten = 0;
934    uint32_t written;
935    if (mAudioSink->getFramesWritten(&written) == OK) {
936        mNumFramesWritten = written;
937    }
938}
939
940void NuPlayer::Renderer::onDisableOffloadAudio() {
941    Mutex::Autolock autoLock(mLock);
942    mFlags &= ~FLAG_OFFLOAD_AUDIO;
943    ++mAudioQueueGeneration;
944}
945
// Publishes the current playback position (extrapolated from the time
// anchors) via a kWhatPosition upcall, throttled to at most once per
// kMinPositionUpdateDelayUs.
void NuPlayer::Renderer::notifyPosition() {
    // notifyPosition() must be called only after setting mAnchorTimeRealUs
    // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position.
    //CHECK_GE(mAnchorTimeRealUs, 0);
    //CHECK_GE(mAnchorTimeMediaUs, 0);
    //CHECK(!mPaused || !mHasAudio);  // video-only does display in paused mode.

    int64_t nowUs = ALooper::GetNowUs();

    // Throttle: skip if the last update was too recent.
    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    // Extrapolate: media position = anchor media time + elapsed real time.
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)"
    //        " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)",
    //        (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs,
    //        (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs);

    // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(),
    // positionUs may be less than the first media time.  This is avoided
    // here to prevent potential retrograde motion of the position bar
    // when starting up after a seek.
    if (positionUs < mFirstAnchorTimeMediaUs) {
        positionUs = mFirstAnchorTimeMediaUs;
    }
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}
981
982void NuPlayer::Renderer::onPause() {
983    if (mPaused) {
984        ALOGW("Renderer::onPause() called while already paused!");
985        return;
986    }
987    {
988        Mutex::Autolock autoLock(mLock);
989        ++mAudioQueueGeneration;
990        ++mVideoQueueGeneration;
991        prepareForMediaRenderingStart();
992        mPaused = true;
993        if (!mHasAudio) {
994            mPauseStartedTimeRealUs = ALooper::GetNowUs();
995        }
996    }
997
998    mDrainAudioQueuePending = false;
999    mDrainVideoQueuePending = false;
1000
1001    if (mHasAudio) {
1002        mAudioSink->pause();
1003        startAudioOffloadPauseTimeout();
1004    }
1005
1006    ALOGV("now paused audio queue has %d entries, video has %d entries",
1007          mAudioQueue.size(), mVideoQueue.size());
1008}
1009
// Handles the resume request on the renderer's looper thread: restarts the
// audio sink if present, corrects the real-time anchor for video-only
// playback, and re-posts drain messages for any queued buffers.
// Calling while not paused is a no-op.
void NuPlayer::Renderer::onResume() {
    // Re-read the persist.sys.media.avsync property in case it changed
    // while playback was paused.
    readProperties();

    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        // Drop any pending offload pause timeout so the sink is not torn
        // down after we restart it. Done before taking mLock below.
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;
    if (!mHasAudio && mPauseStartedTimeRealUs != -1) {
        // Video-only: advance the real-time anchor by the paused duration
        // so position/AV-sync computations ignore the time spent paused.
        // (mPauseStartedTimeRealUs was set in onPause(); -1 means unset.)
        mAnchorTimeRealUs += ALooper::GetNowUs() - mPauseStartedTimeRealUs;
        mPauseStartedTimeRealUs = -1;
    }

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
1037
1038void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
1039    if (mVideoScheduler == NULL) {
1040        mVideoScheduler = new VideoFrameScheduler();
1041    }
1042    mVideoScheduler->init(fps);
1043}
1044
1045// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
1046// as it acquires locks and may query the audio driver.
1047//
1048// Some calls are not needed since notifyPosition() doesn't always deliver a message.
1049// Some calls could conceivably retrieve extrapolated data instead of
1050// accessing getTimestamp() or getPosition() every time a data buffer with
1051// a media time is received.
1052//
1053int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
1054    uint32_t numFramesPlayed;
1055    int64_t numFramesPlayedAt;
1056    AudioTimestamp ts;
1057    static const int64_t kStaleTimestamp100ms = 100000;
1058
1059    status_t res = mAudioSink->getTimestamp(ts);
1060    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
1061        numFramesPlayed = ts.mPosition;
1062        numFramesPlayedAt =
1063            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
1064        const int64_t timestampAge = nowUs - numFramesPlayedAt;
1065        if (timestampAge > kStaleTimestamp100ms) {
1066            // This is an audio FIXME.
1067            // getTimestamp returns a timestamp which may come from audio mixing threads.
1068            // After pausing, the MixerThread may go idle, thus the mTime estimate may
1069            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
1070            // the max latency should be about 25ms with an average around 12ms (to be verified).
1071            // For safety we use 100ms.
1072            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
1073                    (long long)nowUs, (long long)numFramesPlayedAt);
1074            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
1075        }
1076        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
1077    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
1078        numFramesPlayed = 0;
1079        numFramesPlayedAt = nowUs;
1080        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
1081        //        numFramesPlayed, (long long)numFramesPlayedAt);
1082    } else {                         // case 3: transitory at new track or audio fast tracks.
1083        res = mAudioSink->getPosition(&numFramesPlayed);
1084        CHECK_EQ(res, (status_t)OK);
1085        numFramesPlayedAt = nowUs;
1086        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
1087        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
1088    }
1089
1090    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
1091    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
1092    int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
1093            + nowUs - numFramesPlayedAt;
1094    if (durationUs < 0) {
1095        // Occurs when numFramesPlayed position is very small and the following:
1096        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
1097        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
1098        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
1099        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
1100        //
1101        // Both of these are transitory conditions.
1102        ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
1103        durationUs = 0;
1104    }
1105    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
1106            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
1107    return durationUs;
1108}
1109
1110void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
1111    if (mAudioOffloadTornDown) {
1112        return;
1113    }
1114    mAudioOffloadTornDown = true;
1115
1116    int64_t firstAudioTimeUs;
1117    {
1118        Mutex::Autolock autoLock(mLock);
1119        firstAudioTimeUs = mFirstAnchorTimeMediaUs;
1120    }
1121
1122    int64_t currentPositionUs =
1123        firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());
1124
1125    mAudioSink->stop();
1126    mAudioSink->flush();
1127
1128    sp<AMessage> notify = mNotify->dup();
1129    notify->setInt32("what", kWhatAudioOffloadTearDown);
1130    notify->setInt64("positionUs", currentPositionUs);
1131    notify->setInt32("reason", reason);
1132    notify->post();
1133}
1134
1135void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
1136    if (offloadingAudio()) {
1137        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
1138        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
1139        msg->post(kOffloadPauseMaxUs);
1140    }
1141}
1142
1143void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
1144    if (offloadingAudio()) {
1145        ++mAudioOffloadPauseTimeoutGeneration;
1146    }
1147}
1148
1149}  // namespace android
1150
1151