// NuPlayerRenderer.cpp revision f0e83644637bd05852c244df481f21a0d435ff66
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

#include <VideoFrameScheduler.h>

#include <inttypes.h>

namespace android {

// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
static const int64_t kOffloadPauseMaxUs = 10000000ll;

// static
const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
        AUDIO_CHANNEL_NONE,
        AUDIO_OUTPUT_FLAG_NONE,
        AUDIO_FORMAT_INVALID,
        0, // mNumChannels
        0 // mSampleRate
};

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mAnchorNumFramesWritten(-1),
      mAnchorMaxMediaUs(-1),
      mVideoLateByUs(0ll),
      mHasAudio(false),
      mHasVideo(false),
      mPauseStartedTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mNotifyCompleteAudio(false),
      mNotifyCompleteVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0) {
}

NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mNotifyCompleteAudio |= notifyComplete;
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            mNotifyCompleteVideo |= notifyComplete;
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    setAudioFirstAnchorTime(-1);
    setAnchorTime(-1, -1);
    setVideoLateByUs(0);
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::signalEnableOffloadAudio() {
    (new AMessage(kWhatEnableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
    msg->setFloat("frame-rate", fps);
    msg->post();
}

status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
    return getCurrentPosition(mediaUs, ALooper::GetNowUs());
}

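// Returns the current playback position in media time, extrapolated from the most recent
// anchor (anchor media time plus elapsed real time), compensating for time spent paused and,
// unless allowPastQueuedVideo is set, clamping to the last queued media time.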
status_t NuPlayer::Renderer::getCurrentPosition(
        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
    Mutex::Autolock autoLock(mTimeLock);
    if (!mHasAudio && !mHasVideo) {
        return NO_INIT;
    }

    if (mAnchorTimeMediaUs < 0) {
        return NO_INIT;
    }

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    if (mPauseStartedTimeRealUs != -1) {
        positionUs -= (nowUs - mPauseStartedTimeRealUs);
    }

    // limit position to the last queued media time (for video only stream
    // position will be discrete as we don't know how long each frame lasts)
    if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
        if (positionUs > mAnchorMaxMediaUs) {
            positionUs = mAnchorMaxMediaUs;
        }
    }

    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
        positionUs = mAudioFirstAnchorTimeMediaUs;
    }

    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
    return OK;
}

void NuPlayer::Renderer::setHasMedia(bool audio) {
    Mutex::Autolock autoLock(mTimeLock);
    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }
}

void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mAudioFirstAnchorTimeMediaUs = mediaUs;
}

void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
    }
}

void NuPlayer::Renderer::setAnchorTime(
        int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
    Mutex::Autolock autoLock(mTimeLock);
    mAnchorTimeMediaUs = mediaUs;
    mAnchorTimeRealUs = realUs;
    mAnchorNumFramesWritten = numFramesWritten;
    if (resume) {
        mPauseStartedTimeRealUs = -1;
    }
}

void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mTimeLock);
    return mVideoLateByUs;
}

void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mPauseStartedTimeRealUs = realUs;
}

status_t NuPlayer::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        bool *isOffloaded) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
    msg->setMessage("format", format);
    msg->setInt32("offload-only", offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);

    int32_t err;
    if (!response->findInt32("err", &err)) {
        err = INVALID_OPERATION;
    } else if (err == OK && isOffloaded != NULL) {
        int32_t offload;
        CHECK(response->findInt32("offload", &offload));
        *isOffloaded = (offload != 0);
    }
    return err;
}

void NuPlayer::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->setInt32("offload", offloadingAudio());

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatEnableOffloadAudio:
        {
            onEnableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

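// Callback installed on the AudioSink when it is opened in offload mode. It runs on the
// sink's own thread and is used to pull more audio data as well as to report stream-end
// and tear-down events.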
// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

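// Copies as much queued audio as fits into the sink-provided buffer. Called from the
// AudioSink callback thread while offloading; also re-anchors the clock based on how
// much audio has actually been played out so far.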
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (mAudioFirstAnchorTimeMediaUs >= 0) {
        int64_t nowUs = ALooper::GetNowUs();
        setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
    }

    // we don't know how much data we are queueing for offloaded tracks
    mAnchorMaxMediaUs = -1;

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

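// Writes as much queued audio as the (non-offloaded) AudioSink can currently accept.
// Returns true if data remains in the audio queue, so the caller can schedule another drain.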
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            // Need to stop the track here, because that will play out the last
            // little bit at the end of the file. Otherwise short files won't play.
            mAudioSink->stop();
            mNumFramesWritten = 0;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
            ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
            break;
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    mAnchorMaxMediaUs =
        mAnchorTimeMediaUs +
                (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
                        * 1000LL * mAudioSink->msecsPerFrame());

    return !mAudioQueue.empty();
}

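// Returns the duration of audio that has been written to the sink but not yet played out.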
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

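// Maps a media timestamp to the real (system) time at which it should be rendered,
// based on the current playback position.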
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t currentPositionUs;
    if (getCurrentPosition(&currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
        // If we failed to get the current position, e.g. because the audio clock is
        // not ready, just play out the video immediately without delay.
        return nowUs;
    }
    return (mediaTimeUs - currentPositionUs) + nowUs;
}

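// Re-anchors the clock using the media time of the audio buffer about to be written and
// the real time at which that audio is expected to be heard.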
void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
    int64_t nowUs = ALooper::GetNowUs();
    setAnchorTime(
            mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
}

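// Schedules kWhatDrainVideoQueue so that the frame at the head of the video queue is
// handled roughly two vsyncs before its target render time.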
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            setAnchorTime(mediaTimeUs, nowUs);
            mAnchorMaxMediaUs = mediaTimeUs;
            realTimeUs = nowUs;
        } else {
            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
        }
        if (!mHasAudio) {
            mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
        }

        // Heuristics to handle situation when media time changed without a
        // discontinuity. If we have not drained an audio buffer that was
        // received after this buffer, repost in 10 msec. Otherwise repost
        // in 500 msec.
        delayUs = realTimeUs - nowUs;
        if (delayUs > 500000) {
            int64_t postDelayUs = 500000;
            if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
                postDelayUs = 10000;
            }
            msg->setWhat(kWhatPostDrainVideoQueue);
            msg->post(postDelayUs);
            mVideoScheduler->restart();
            ALOGI("possible video time jump of %dms, retrying in %dms",
                    (int)(delayUs / 1000), (int)(postDelayUs / 1000));
            mDrainVideoQueuePending = true;
            return;
        }
    }

    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}

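// Releases the frame at the head of the video queue with a render/drop decision:
// while playing, frames more than 40ms late are dropped rather than rendered.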
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        setVideoLateByUs(0);
        return;
    }

    int64_t nowUs = -1;
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        nowUs = ALooper::GetNowUs();
        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
    }

    bool tooLate = false;

    if (!mPaused) {
        if (nowUs == -1) {
            nowUs = ALooper::GetNowUs();
        }
        setVideoLateByUs(nowUs - realTimeUs);
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %lld us (%.2f secs)",
                 mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
        }
    } else {
        setVideoLateByUs(0);
        if (!mVideoSampleReceived && !mHasAudio) {
            // This will ensure that the first frame after a flush won't be used as anchor
            // when renderer is in paused state, because resume can happen any time after seek.
            setAnchorTime(-1, -1);
        }
    }

    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

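// Queues a decoded buffer on the audio or video queue. While the two queues are still
// being synced, leading audio may be dropped so that audio and video start at roughly
// the same media time.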
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    setHasMedia(audio);

    if (mHasVideo) {
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;
    entry.mBufferOrdinal = ++mTotalBuffersQueued;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

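// Ends the initial audio/video queue syncing phase and kicks off draining of whichever
// queues already contain data.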
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio, notifyComplete;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
            notifyComplete = mNotifyCompleteAudio;
            mNotifyCompleteAudio = false;
        } else {
            mFlushingVideo = false;
            notifyComplete = mNotifyCompleteVideo;
            mNotifyCompleteVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
         setPauseStartedTimeRealUs(-1);
         setAnchorTime(-1, -1);
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                setAudioFirstAnchorTime(-1);
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;

    if (notifyComplete) {
        notifyFlushComplete(audio);
    }
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

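// While a flush is pending for the given stream, incoming buffers are returned to the
// decoder immediately instead of being queued.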
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    mAnchorNumFramesWritten = -1;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

void NuPlayer::Renderer::onEnableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags |= FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

void NuPlayer::Renderer::onPause() {
    if (mPaused) {
        ALOGW("Renderer::onPause() called while already paused!");
        return;
    }
    {
        Mutex::Autolock autoLock(mLock);
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
        mPaused = true;
        setPauseStartedTimeRealUs(ALooper::GetNowUs());
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
        startAudioOffloadPauseTimeout();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;
    if (mPauseStartedTimeRealUs != -1) {
        int64_t newAnchorRealUs =
            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
        setAnchorTime(
                mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
    }

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
    if (mVideoScheduler == NULL) {
        mVideoScheduler = new VideoFrameScheduler();
    }
    mVideoScheduler->init(fps);
}

// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //        numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}

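// Stops and flushes the offloaded AudioSink and notifies the player, passing along the
// position at which playback should resume and the reason for the tear-down.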
void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
    if (mAudioOffloadTornDown) {
        return;
    }
    mAudioOffloadTornDown = true;

    int64_t currentPositionUs;
    if (getCurrentPosition(&currentPositionUs) != OK) {
        currentPositionUs = 0;
    }

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->setInt32("reason", reason);
    notify->post();
}

void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
        msg->post(kOffloadPauseMaxUs);
    }
}

void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        ++mAudioOffloadPauseTimeoutGeneration;
    }
}

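// Opens the AudioSink, first attempting compressed offload when offloading is enabled and
// falling back to 16-bit PCM output if offload fails or is not requested. Returns OK
// without reopening the sink when the requested configuration matches the current one.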
status_t NuPlayer::Renderer::onOpenAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
            offloadOnly, offloadingAudio());
    bool audioSinkChanged = false;

    int32_t numChannels;
    CHECK(format->findInt32("channel-count", &numChannels));

    int32_t channelMask;
    if (!format->findInt32("channel-mask", &channelMask)) {
        // signal to the AudioSink to derive the mask from count.
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    int32_t sampleRate;
    CHECK(format->findInt32("sample-rate", &sampleRate));

    if (offloadingAudio()) {
        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
        AString mime;
        CHECK(format->findString("mime", &mime));
        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());

        if (err != OK) {
            ALOGE("Couldn't map mime \"%s\" to a valid "
                    "audio_format", mime.c_str());
            onDisableOffloadAudio();
        } else {
            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                    mime.c_str(), audioFormat);

            int avgBitRate = -1;
            format->findInt32("bit-rate", &avgBitRate);

            int32_t aacProfile = -1;
            if (audioFormat == AUDIO_FORMAT_AAC
                    && format->findInt32("aac-profile", &aacProfile)) {
                // Redefine AAC format as per aac profile
                mapAACProfileToAudioFormat(
                        audioFormat,
                        aacProfile);
            }

            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.duration_us = -1;
            format->findInt64(
                    "durationUs", &offloadInfo.duration_us);
            offloadInfo.sample_rate = sampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = hasVideo;
            offloadInfo.is_streaming = true;

            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                ALOGV("openAudioSink: no change in offload mode");
                // no change from previous configuration, everything ok.
                return OK;
            }
            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;

            ALOGV("openAudioSink: try to open AudioSink in offload mode");
            uint32_t offloadFlags = flags;
            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
            audioSinkChanged = true;
            mAudioSink->close();
            err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    audioFormat,
                    8 /* bufferCount */,
                    &NuPlayer::Renderer::AudioSinkCallback,
                    this,
                    (audio_output_flags_t)offloadFlags,
                    &offloadInfo);

            if (err == OK) {
                // If the playback is offloaded to h/w, we pass
                // the HAL some metadata information.
                // We don't want to do this for PCM because it
                // will be going through the AudioFlinger mixer
                // before reaching the hardware.
                // TODO
                mCurrentOffloadInfo = offloadInfo;
                err = mAudioSink->start();
                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
            }
            if (err != OK) {
                // Clean up, fall back to non offload mode.
                mAudioSink->close();
                onDisableOffloadAudio();
                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                ALOGV("openAudioSink: offload failed");
            }
        }
    }
    if (!offloadOnly && !offloadingAudio()) {
        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
        uint32_t pcmFlags = flags;
        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;

        const PcmInfo info = {
                (audio_channel_mask_t)channelMask,
                (audio_output_flags_t)pcmFlags,
                AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
                numChannels,
                sampleRate
        };
        if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
            ALOGV("openAudioSink: no change in pcm mode");
            // no change from previous configuration, everything ok.
            return OK;
        }

        audioSinkChanged = true;
        mAudioSink->close();
        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
        status_t err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    AUDIO_FORMAT_PCM_16_BIT,
                    8 /* bufferCount */,
                    NULL,
                    NULL,
                    (audio_output_flags_t)pcmFlags);
        if (err != OK) {
            ALOGW("openAudioSink: non offloaded open failed status: %d", err);
            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
            return err;
        }
        mCurrentPcmInfo = info;
        mAudioSink->start();
    }
    if (audioSinkChanged) {
        onAudioSinkChanged();
    }
    if (offloadingAudio()) {
        mAudioOffloadTornDown = false;
    }
    return OK;
}

void NuPlayer::Renderer::onCloseAudioSink() {
    mAudioSink->close();
    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
    mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
}

}  // namespace android