NuPlayerRenderer.cpp revision a10fd23bb9fcf16e778c639ea5638e2917dacd89
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "NuPlayerRenderer"
19#include <utils/Log.h>
20
21#include "NuPlayerRenderer.h"
22
23#include <media/stagefright/foundation/ABuffer.h>
24#include <media/stagefright/foundation/ADebug.h>
25#include <media/stagefright/foundation/AMessage.h>
26#include <media/stagefright/foundation/AUtils.h>
27#include <media/stagefright/MediaErrors.h>
28#include <media/stagefright/MetaData.h>
29#include <media/stagefright/Utils.h>
30
31#include <VideoFrameScheduler.h>
32
33#include <inttypes.h>
34
35namespace android {
36
37// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
38// is closed to allow the audio DSP to power down.
39static const int64_t kOffloadPauseMaxUs = 10000000ll;
40
41// static
42const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
43
44NuPlayer::Renderer::Renderer(
45        const sp<MediaPlayerBase::AudioSink> &sink,
46        const sp<AMessage> &notify,
47        uint32_t flags)
48    : mAudioSink(sink),
49      mNotify(notify),
50      mFlags(flags),
51      mNumFramesWritten(0),
52      mDrainAudioQueuePending(false),
53      mDrainVideoQueuePending(false),
54      mAudioQueueGeneration(0),
55      mVideoQueueGeneration(0),
56      mAudioFirstAnchorTimeMediaUs(-1),
57      mAnchorTimeMediaUs(-1),
58      mAnchorTimeRealUs(-1),
59      mAnchorNumFramesWritten(-1),
60      mAnchorMaxMediaUs(-1),
61      mVideoLateByUs(0ll),
62      mHasAudio(false),
63      mHasVideo(false),
64      mPauseStartedTimeRealUs(-1),
65      mFlushingAudio(false),
66      mFlushingVideo(false),
67      mSyncQueues(false),
68      mPaused(false),
69      mVideoSampleReceived(false),
70      mVideoRenderingStarted(false),
71      mVideoRenderingStartGeneration(0),
72      mAudioRenderingStartGeneration(0),
73      mAudioOffloadPauseTimeoutGeneration(0),
74      mAudioOffloadTornDown(false),
75      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
76      mTotalBuffersQueued(0),
77      mLastAudioBufferDrained(0) {
78}
79
80NuPlayer::Renderer::~Renderer() {
81    if (offloadingAudio()) {
82        mAudioSink->stop();
83        mAudioSink->flush();
84        mAudioSink->close();
85    }
86}
87
88void NuPlayer::Renderer::queueBuffer(
89        bool audio,
90        const sp<ABuffer> &buffer,
91        const sp<AMessage> &notifyConsumed) {
92    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
93    msg->setInt32("audio", static_cast<int32_t>(audio));
94    msg->setBuffer("buffer", buffer);
95    msg->setMessage("notifyConsumed", notifyConsumed);
96    msg->post();
97}
98
99void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
100    CHECK_NE(finalResult, (status_t)OK);
101
102    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
103    msg->setInt32("audio", static_cast<int32_t>(audio));
104    msg->setInt32("finalResult", finalResult);
105    msg->post();
106}
107
108void NuPlayer::Renderer::flush(bool audio) {
109    {
110        Mutex::Autolock autoLock(mFlushLock);
111        if (audio) {
112            if (mFlushingAudio) {
113                return;
114            }
115            mFlushingAudio = true;
116        } else {
117            if (mFlushingVideo) {
118                return;
119            }
120            mFlushingVideo = true;
121        }
122    }
123
124    sp<AMessage> msg = new AMessage(kWhatFlush, id());
125    msg->setInt32("audio", static_cast<int32_t>(audio));
126    msg->post();
127}
128
129void NuPlayer::Renderer::signalTimeDiscontinuity() {
130    Mutex::Autolock autoLock(mLock);
131    // CHECK(mAudioQueue.empty());
132    // CHECK(mVideoQueue.empty());
133    setAudioFirstAnchorTime(-1);
134    setAnchorTime(-1, -1);
135    setVideoLateByUs(0);
136    mSyncQueues = false;
137}
138
139void NuPlayer::Renderer::signalAudioSinkChanged() {
140    (new AMessage(kWhatAudioSinkChanged, id()))->post();
141}
142
143void NuPlayer::Renderer::signalDisableOffloadAudio() {
144    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
145}
146
147void NuPlayer::Renderer::signalEnableOffloadAudio() {
148    (new AMessage(kWhatEnableOffloadAudio, id()))->post();
149}
150
151void NuPlayer::Renderer::pause() {
152    (new AMessage(kWhatPause, id()))->post();
153}
154
155void NuPlayer::Renderer::resume() {
156    (new AMessage(kWhatResume, id()))->post();
157}
158
159void NuPlayer::Renderer::setVideoFrameRate(float fps) {
160    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
161    msg->setFloat("frame-rate", fps);
162    msg->post();
163}
164
165status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
166    return getCurrentPosition(mediaUs, ALooper::GetNowUs());
167}
168
169status_t NuPlayer::Renderer::getCurrentPosition(
170        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
171    Mutex::Autolock autoLock(mTimeLock);
172    if (!mHasAudio && !mHasVideo) {
173        return NO_INIT;
174    }
175
176    if (mAnchorTimeMediaUs < 0) {
177        return NO_INIT;
178    }
179
180    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
181
182    if (mPauseStartedTimeRealUs != -1) {
183        positionUs -= (nowUs - mPauseStartedTimeRealUs);
184    }
185
186    // limit position to the last queued media time (for video only stream
187    // position will be discrete as we don't know how long each frame lasts)
188    if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
189        if (positionUs > mAnchorMaxMediaUs) {
190            positionUs = mAnchorMaxMediaUs;
191        }
192    }
193
194    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
195        positionUs = mAudioFirstAnchorTimeMediaUs;
196    }
197
198    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
199    return OK;
200}
201
202void NuPlayer::Renderer::setHasMedia(bool audio) {
203    Mutex::Autolock autoLock(mTimeLock);
204    if (audio) {
205        mHasAudio = true;
206    } else {
207        mHasVideo = true;
208    }
209}
210
211void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
212    Mutex::Autolock autoLock(mTimeLock);
213    mAudioFirstAnchorTimeMediaUs = mediaUs;
214}
215
216void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
217    Mutex::Autolock autoLock(mTimeLock);
218    if (mAudioFirstAnchorTimeMediaUs == -1) {
219        mAudioFirstAnchorTimeMediaUs = mediaUs;
220    }
221}
222
223void NuPlayer::Renderer::setAnchorTime(
224        int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
225    Mutex::Autolock autoLock(mTimeLock);
226    mAnchorTimeMediaUs = mediaUs;
227    mAnchorTimeRealUs = realUs;
228    mAnchorNumFramesWritten = numFramesWritten;
229    if (resume) {
230        mPauseStartedTimeRealUs = -1;
231    }
232}
233
234void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
235    Mutex::Autolock autoLock(mTimeLock);
236    mVideoLateByUs = lateUs;
237}
238
239int64_t NuPlayer::Renderer::getVideoLateByUs() {
240    Mutex::Autolock autoLock(mTimeLock);
241    return mVideoLateByUs;
242}
243
244void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
245    Mutex::Autolock autoLock(mTimeLock);
246    mPauseStartedTimeRealUs = realUs;
247}
248
249bool NuPlayer::Renderer::openAudioSink(
250        const sp<AMessage> &format,
251        bool offloadOnly,
252        bool hasVideo,
253        uint32_t flags) {
254    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
255    msg->setMessage("format", format);
256    msg->setInt32("offload-only", offloadOnly);
257    msg->setInt32("has-video", hasVideo);
258    msg->setInt32("flags", flags);
259
260    sp<AMessage> response;
261    msg->postAndAwaitResponse(&response);
262
263    int32_t offload;
264    CHECK(response->findInt32("offload", &offload));
265    return (offload != 0);
266}
267
268void NuPlayer::Renderer::closeAudioSink() {
269    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());
270
271    sp<AMessage> response;
272    msg->postAndAwaitResponse(&response);
273}
274
275void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
276    switch (msg->what()) {
277        case kWhatOpenAudioSink:
278        {
279            sp<AMessage> format;
280            CHECK(msg->findMessage("format", &format));
281
282            int32_t offloadOnly;
283            CHECK(msg->findInt32("offload-only", &offloadOnly));
284
285            int32_t hasVideo;
286            CHECK(msg->findInt32("has-video", &hasVideo));
287
288            uint32_t flags;
289            CHECK(msg->findInt32("flags", (int32_t *)&flags));
290
291            bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags);
292
293            sp<AMessage> response = new AMessage;
294            response->setInt32("offload", offload);
295
296            uint32_t replyID;
297            CHECK(msg->senderAwaitsResponse(&replyID));
298            response->postReply(replyID);
299
300            break;
301        }
302
303        case kWhatCloseAudioSink:
304        {
305            uint32_t replyID;
306            CHECK(msg->senderAwaitsResponse(&replyID));
307
308            onCloseAudioSink();
309
310            sp<AMessage> response = new AMessage;
311            response->postReply(replyID);
312            break;
313        }
314
315        case kWhatStopAudioSink:
316        {
317            mAudioSink->stop();
318            break;
319        }
320
321        case kWhatDrainAudioQueue:
322        {
323            int32_t generation;
324            CHECK(msg->findInt32("generation", &generation));
325            if (generation != mAudioQueueGeneration) {
326                break;
327            }
328
329            mDrainAudioQueuePending = false;
330
331            if (onDrainAudioQueue()) {
332                uint32_t numFramesPlayed;
333                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
334                         (status_t)OK);
335
336                uint32_t numFramesPendingPlayout =
337                    mNumFramesWritten - numFramesPlayed;
338
339                // This is how long the audio sink will have data to
340                // play back.
341                int64_t delayUs =
342                    mAudioSink->msecsPerFrame()
343                        * numFramesPendingPlayout * 1000ll;
344
345                // Let's give it more data after about half that time
346                // has elapsed.
347                // kWhatDrainAudioQueue is used for non-offloading mode,
348                // and mLock is used only for offloading mode. Therefore,
349                // no need to acquire mLock here.
350                postDrainAudioQueue_l(delayUs / 2);
351            }
352            break;
353        }
354
355        case kWhatDrainVideoQueue:
356        {
357            int32_t generation;
358            CHECK(msg->findInt32("generation", &generation));
359            if (generation != mVideoQueueGeneration) {
360                break;
361            }
362
363            mDrainVideoQueuePending = false;
364
365            onDrainVideoQueue();
366
367            postDrainVideoQueue();
368            break;
369        }
370
371        case kWhatPostDrainVideoQueue:
372        {
373            int32_t generation;
374            CHECK(msg->findInt32("generation", &generation));
375            if (generation != mVideoQueueGeneration) {
376                break;
377            }
378
379            mDrainVideoQueuePending = false;
380            postDrainVideoQueue();
381            break;
382        }
383
384        case kWhatQueueBuffer:
385        {
386            onQueueBuffer(msg);
387            break;
388        }
389
390        case kWhatQueueEOS:
391        {
392            onQueueEOS(msg);
393            break;
394        }
395
396        case kWhatFlush:
397        {
398            onFlush(msg);
399            break;
400        }
401
402        case kWhatAudioSinkChanged:
403        {
404            onAudioSinkChanged();
405            break;
406        }
407
408        case kWhatDisableOffloadAudio:
409        {
410            onDisableOffloadAudio();
411            break;
412        }
413
414        case kWhatEnableOffloadAudio:
415        {
416            onEnableOffloadAudio();
417            break;
418        }
419
420        case kWhatPause:
421        {
422            onPause();
423            break;
424        }
425
426        case kWhatResume:
427        {
428            onResume();
429            break;
430        }
431
432        case kWhatSetVideoFrameRate:
433        {
434            float fps;
435            CHECK(msg->findFloat("frame-rate", &fps));
436            onSetVideoFrameRate(fps);
437            break;
438        }
439
440        case kWhatAudioOffloadTearDown:
441        {
442            onAudioOffloadTearDown(kDueToError);
443            break;
444        }
445
446        case kWhatAudioOffloadPauseTimeout:
447        {
448            int32_t generation;
449            CHECK(msg->findInt32("generation", &generation));
450            if (generation != mAudioOffloadPauseTimeoutGeneration) {
451                break;
452            }
453            ALOGV("Audio Offload tear down due to pause timeout.");
454            onAudioOffloadTearDown(kDueToTimeout);
455            break;
456        }
457
458        default:
459            TRESPASS();
460            break;
461    }
462}
463
464void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
465    if (mDrainAudioQueuePending || mSyncQueues || mPaused
466            || offloadingAudio()) {
467        return;
468    }
469
470    if (mAudioQueue.empty()) {
471        return;
472    }
473
474    mDrainAudioQueuePending = true;
475    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
476    msg->setInt32("generation", mAudioQueueGeneration);
477    msg->post(delayUs);
478}
479
480void NuPlayer::Renderer::prepareForMediaRenderingStart() {
481    mAudioRenderingStartGeneration = mAudioQueueGeneration;
482    mVideoRenderingStartGeneration = mVideoQueueGeneration;
483}
484
485void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
486    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
487        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
488        mVideoRenderingStartGeneration = -1;
489        mAudioRenderingStartGeneration = -1;
490
491        sp<AMessage> notify = mNotify->dup();
492        notify->setInt32("what", kWhatMediaRenderingStart);
493        notify->post();
494    }
495}
496
497// static
498size_t NuPlayer::Renderer::AudioSinkCallback(
499        MediaPlayerBase::AudioSink * /* audioSink */,
500        void *buffer,
501        size_t size,
502        void *cookie,
503        MediaPlayerBase::AudioSink::cb_event_t event) {
504    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
505
506    switch (event) {
507        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
508        {
509            return me->fillAudioBuffer(buffer, size);
510            break;
511        }
512
513        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
514        {
515            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
516            break;
517        }
518
519        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
520        {
521            me->notifyAudioOffloadTearDown();
522            break;
523        }
524    }
525
526    return 0;
527}
528
529size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
530    Mutex::Autolock autoLock(mLock);
531
532    if (!offloadingAudio() || mPaused) {
533        return 0;
534    }
535
536    bool hasEOS = false;
537
538    size_t sizeCopied = 0;
539    bool firstEntry = true;
540    while (sizeCopied < size && !mAudioQueue.empty()) {
541        QueueEntry *entry = &*mAudioQueue.begin();
542
543        if (entry->mBuffer == NULL) { // EOS
544            hasEOS = true;
545            mAudioQueue.erase(mAudioQueue.begin());
546            entry = NULL;
547            break;
548        }
549
550        if (firstEntry && entry->mOffset == 0) {
551            firstEntry = false;
552            int64_t mediaTimeUs;
553            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
554            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
555            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
556        }
557
558        size_t copy = entry->mBuffer->size() - entry->mOffset;
559        size_t sizeRemaining = size - sizeCopied;
560        if (copy > sizeRemaining) {
561            copy = sizeRemaining;
562        }
563
564        memcpy((char *)buffer + sizeCopied,
565               entry->mBuffer->data() + entry->mOffset,
566               copy);
567
568        entry->mOffset += copy;
569        if (entry->mOffset == entry->mBuffer->size()) {
570            entry->mNotifyConsumed->post();
571            mAudioQueue.erase(mAudioQueue.begin());
572            entry = NULL;
573        }
574        sizeCopied += copy;
575        notifyIfMediaRenderingStarted();
576    }
577
578    if (mAudioFirstAnchorTimeMediaUs >= 0) {
579        int64_t nowUs = ALooper::GetNowUs();
580        setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
581    }
582
583    // we don't know how much data we are queueing for offloaded tracks
584    mAnchorMaxMediaUs = -1;
585
586    if (hasEOS) {
587        (new AMessage(kWhatStopAudioSink, id()))->post();
588    }
589
590    return sizeCopied;
591}
592
593bool NuPlayer::Renderer::onDrainAudioQueue() {
594    uint32_t numFramesPlayed;
595    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
596        return false;
597    }
598
599    ssize_t numFramesAvailableToWrite =
600        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
601
602#if 0
603    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
604        ALOGI("audio sink underrun");
605    } else {
606        ALOGV("audio queue has %d frames left to play",
607             mAudioSink->frameCount() - numFramesAvailableToWrite);
608    }
609#endif
610
611    size_t numBytesAvailableToWrite =
612        numFramesAvailableToWrite * mAudioSink->frameSize();
613
614    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
615        QueueEntry *entry = &*mAudioQueue.begin();
616
617        mLastAudioBufferDrained = entry->mBufferOrdinal;
618
619        if (entry->mBuffer == NULL) {
620            // EOS
621            int64_t postEOSDelayUs = 0;
622            if (mAudioSink->needsTrailingPadding()) {
623                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
624            }
625            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
626
627            mAudioQueue.erase(mAudioQueue.begin());
628            entry = NULL;
629            // Need to stop the track here, because that will play out the last
630            // little bit at the end of the file. Otherwise short files won't play.
631            mAudioSink->stop();
632            mNumFramesWritten = 0;
633            return false;
634        }
635
636        if (entry->mOffset == 0) {
637            int64_t mediaTimeUs;
638            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
639            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
640            onNewAudioMediaTime(mediaTimeUs);
641        }
642
643        size_t copy = entry->mBuffer->size() - entry->mOffset;
644        if (copy > numBytesAvailableToWrite) {
645            copy = numBytesAvailableToWrite;
646        }
647
648        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
649        if (written < 0) {
650            // An error in AudioSink write is fatal here.
651            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
652        }
653
654        entry->mOffset += written;
655        if (entry->mOffset == entry->mBuffer->size()) {
656            entry->mNotifyConsumed->post();
657            mAudioQueue.erase(mAudioQueue.begin());
658
659            entry = NULL;
660        }
661
662        numBytesAvailableToWrite -= written;
663        size_t copiedFrames = written / mAudioSink->frameSize();
664        mNumFramesWritten += copiedFrames;
665
666        notifyIfMediaRenderingStarted();
667
668        if (written != (ssize_t)copy) {
669            // A short count was received from AudioSink::write()
670            //
671            // AudioSink write should block until exactly the number of bytes are delivered.
672            // But it may return with a short count (without an error) when:
673            //
674            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
675            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
676
677            // (Case 1)
678            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
679            // needs to fail, as we should not carry over fractional frames between calls.
680            CHECK_EQ(copy % mAudioSink->frameSize(), 0);
681
682            // (Case 2)
683            // Return early to the caller.
684            // Beware of calling immediately again as this may busy-loop if you are not careful.
685            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
686            break;
687        }
688    }
689    mAnchorMaxMediaUs =
690        mAnchorTimeMediaUs +
691                (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
692                        * 1000LL * mAudioSink->msecsPerFrame());
693
694    return !mAudioQueue.empty();
695}
696
697int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
698    int64_t writtenAudioDurationUs =
699        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
700    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
701}
702
703int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
704    int64_t currentPositionUs;
705    if (getCurrentPosition(&currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
706        // If failed to get current position, e.g. due to audio clock is not ready, then just
707        // play out video immediately without delay.
708        return nowUs;
709    }
710    return (mediaTimeUs - currentPositionUs) + nowUs;
711}
712
713void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
714    // TRICKY: vorbis decoder generates multiple frames with the same
715    // timestamp, so only update on the first frame with a given timestamp
716    if (mediaTimeUs == mAnchorTimeMediaUs) {
717        return;
718    }
719    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
720    int64_t nowUs = ALooper::GetNowUs();
721    setAnchorTime(
722            mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
723}
724
725void NuPlayer::Renderer::postDrainVideoQueue() {
726    if (mDrainVideoQueuePending
727            || mSyncQueues
728            || (mPaused && mVideoSampleReceived)) {
729        return;
730    }
731
732    if (mVideoQueue.empty()) {
733        return;
734    }
735
736    QueueEntry &entry = *mVideoQueue.begin();
737
738    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
739    msg->setInt32("generation", mVideoQueueGeneration);
740
741    if (entry.mBuffer == NULL) {
742        // EOS doesn't carry a timestamp.
743        msg->post();
744        mDrainVideoQueuePending = true;
745        return;
746    }
747
748    int64_t delayUs;
749    int64_t nowUs = ALooper::GetNowUs();
750    int64_t realTimeUs;
751    if (mFlags & FLAG_REAL_TIME) {
752        int64_t mediaTimeUs;
753        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
754        realTimeUs = mediaTimeUs;
755    } else {
756        int64_t mediaTimeUs;
757        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
758
759        if (mAnchorTimeMediaUs < 0) {
760            setAnchorTime(mediaTimeUs, nowUs);
761            realTimeUs = nowUs;
762        } else {
763            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
764        }
765        if (!mHasAudio) {
766            mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
767        }
768
769        // Heuristics to handle situation when media time changed without a
770        // discontinuity. If we have not drained an audio buffer that was
771        // received after this buffer, repost in 10 msec. Otherwise repost
772        // in 500 msec.
773        delayUs = realTimeUs - nowUs;
774        if (delayUs > 500000) {
775            int64_t postDelayUs = 500000;
776            if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
777                postDelayUs = 10000;
778            }
779            msg->setWhat(kWhatPostDrainVideoQueue);
780            msg->post(postDelayUs);
781            mVideoScheduler->restart();
782            ALOGI("possible video time jump of %dms, retrying in %dms",
783                    (int)(delayUs / 1000), (int)(postDelayUs / 1000));
784            mDrainVideoQueuePending = true;
785            return;
786        }
787    }
788
789    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
790    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
791
792    delayUs = realTimeUs - nowUs;
793
794    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
795    // post 2 display refreshes before rendering is due
796    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
797
798    mDrainVideoQueuePending = true;
799}
800
801void NuPlayer::Renderer::onDrainVideoQueue() {
802    if (mVideoQueue.empty()) {
803        return;
804    }
805
806    QueueEntry *entry = &*mVideoQueue.begin();
807
808    if (entry->mBuffer == NULL) {
809        // EOS
810
811        notifyEOS(false /* audio */, entry->mFinalResult);
812
813        mVideoQueue.erase(mVideoQueue.begin());
814        entry = NULL;
815
816        setVideoLateByUs(0);
817        return;
818    }
819
820    int64_t nowUs = -1;
821    int64_t realTimeUs;
822    if (mFlags & FLAG_REAL_TIME) {
823        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
824    } else {
825        int64_t mediaTimeUs;
826        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
827
828        nowUs = ALooper::GetNowUs();
829        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
830    }
831
832    bool tooLate = false;
833
834    if (!mPaused) {
835        if (nowUs == -1) {
836            nowUs = ALooper::GetNowUs();
837        }
838        setVideoLateByUs(nowUs - realTimeUs);
839        tooLate = (mVideoLateByUs > 40000);
840
841        if (tooLate) {
842            ALOGV("video late by %lld us (%.2f secs)",
843                 mVideoLateByUs, mVideoLateByUs / 1E6);
844        } else {
845            ALOGV("rendering video at media time %.2f secs",
846                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
847                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
848        }
849    } else {
850        setVideoLateByUs(0);
851        if (!mVideoSampleReceived && !mHasAudio) {
852            // This will ensure that the first frame after a flush won't be used as anchor
853            // when renderer is in paused state, because resume can happen any time after seek.
854            setAnchorTime(-1, -1);
855        }
856    }
857
858    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
859    entry->mNotifyConsumed->setInt32("render", !tooLate);
860    entry->mNotifyConsumed->post();
861    mVideoQueue.erase(mVideoQueue.begin());
862    entry = NULL;
863
864    mVideoSampleReceived = true;
865
866    if (!mPaused) {
867        if (!mVideoRenderingStarted) {
868            mVideoRenderingStarted = true;
869            notifyVideoRenderingStart();
870        }
871        notifyIfMediaRenderingStarted();
872    }
873}
874
875void NuPlayer::Renderer::notifyVideoRenderingStart() {
876    sp<AMessage> notify = mNotify->dup();
877    notify->setInt32("what", kWhatVideoRenderingStart);
878    notify->post();
879}
880
881void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
882    sp<AMessage> notify = mNotify->dup();
883    notify->setInt32("what", kWhatEOS);
884    notify->setInt32("audio", static_cast<int32_t>(audio));
885    notify->setInt32("finalResult", finalResult);
886    notify->post(delayUs);
887}
888
889void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
890    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
891}
892
893void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
894    int32_t audio;
895    CHECK(msg->findInt32("audio", &audio));
896
897    setHasMedia(audio);
898
899    if (mHasVideo) {
900        if (mVideoScheduler == NULL) {
901            mVideoScheduler = new VideoFrameScheduler();
902            mVideoScheduler->init();
903        }
904    }
905
906    if (dropBufferWhileFlushing(audio, msg)) {
907        return;
908    }
909
910    sp<ABuffer> buffer;
911    CHECK(msg->findBuffer("buffer", &buffer));
912
913    sp<AMessage> notifyConsumed;
914    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
915
916    QueueEntry entry;
917    entry.mBuffer = buffer;
918    entry.mNotifyConsumed = notifyConsumed;
919    entry.mOffset = 0;
920    entry.mFinalResult = OK;
921    entry.mBufferOrdinal = ++mTotalBuffersQueued;
922
923    if (audio) {
924        Mutex::Autolock autoLock(mLock);
925        mAudioQueue.push_back(entry);
926        postDrainAudioQueue_l();
927    } else {
928        mVideoQueue.push_back(entry);
929        postDrainVideoQueue();
930    }
931
932    Mutex::Autolock autoLock(mLock);
933    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
934        return;
935    }
936
937    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
938    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
939
940    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
941        // EOS signalled on either queue.
942        syncQueuesDone_l();
943        return;
944    }
945
946    int64_t firstAudioTimeUs;
947    int64_t firstVideoTimeUs;
948    CHECK(firstAudioBuffer->meta()
949            ->findInt64("timeUs", &firstAudioTimeUs));
950    CHECK(firstVideoBuffer->meta()
951            ->findInt64("timeUs", &firstVideoTimeUs));
952
953    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
954
955    ALOGV("queueDiff = %.2f secs", diff / 1E6);
956
957    if (diff > 100000ll) {
958        // Audio data starts More than 0.1 secs before video.
959        // Drop some audio.
960
961        (*mAudioQueue.begin()).mNotifyConsumed->post();
962        mAudioQueue.erase(mAudioQueue.begin());
963        return;
964    }
965
966    syncQueuesDone_l();
967}
968
969void NuPlayer::Renderer::syncQueuesDone_l() {
970    if (!mSyncQueues) {
971        return;
972    }
973
974    mSyncQueues = false;
975
976    if (!mAudioQueue.empty()) {
977        postDrainAudioQueue_l();
978    }
979
980    if (!mVideoQueue.empty()) {
981        postDrainVideoQueue();
982    }
983}
984
985void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
986    int32_t audio;
987    CHECK(msg->findInt32("audio", &audio));
988
989    if (dropBufferWhileFlushing(audio, msg)) {
990        return;
991    }
992
993    int32_t finalResult;
994    CHECK(msg->findInt32("finalResult", &finalResult));
995
996    QueueEntry entry;
997    entry.mOffset = 0;
998    entry.mFinalResult = finalResult;
999
1000    if (audio) {
1001        Mutex::Autolock autoLock(mLock);
1002        if (mAudioQueue.empty() && mSyncQueues) {
1003            syncQueuesDone_l();
1004        }
1005        mAudioQueue.push_back(entry);
1006        postDrainAudioQueue_l();
1007    } else {
1008        if (mVideoQueue.empty() && mSyncQueues) {
1009            Mutex::Autolock autoLock(mLock);
1010            syncQueuesDone_l();
1011        }
1012        mVideoQueue.push_back(entry);
1013        postDrainVideoQueue();
1014    }
1015}
1016
1017void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
1018    int32_t audio;
1019    CHECK(msg->findInt32("audio", &audio));
1020
1021    {
1022        Mutex::Autolock autoLock(mFlushLock);
1023        if (audio) {
1024            mFlushingAudio = false;
1025        } else {
1026            mFlushingVideo = false;
1027        }
1028    }
1029
1030    // If we're currently syncing the queues, i.e. dropping audio while
1031    // aligning the first audio/video buffer times and only one of the
1032    // two queues has data, we may starve that queue by not requesting
1033    // more buffers from the decoder. If the other source then encounters
1034    // a discontinuity that leads to flushing, we'll never find the
1035    // corresponding discontinuity on the other queue.
1036    // Therefore we'll stop syncing the queues if at least one of them
1037    // is flushed.
1038    {
1039         Mutex::Autolock autoLock(mLock);
1040         syncQueuesDone_l();
1041         setPauseStartedTimeRealUs(-1);
1042    }
1043
1044    ALOGV("flushing %s", audio ? "audio" : "video");
1045    if (audio) {
1046        {
1047            Mutex::Autolock autoLock(mLock);
1048            flushQueue(&mAudioQueue);
1049
1050            ++mAudioQueueGeneration;
1051            prepareForMediaRenderingStart();
1052
1053            if (offloadingAudio()) {
1054                setAudioFirstAnchorTime(-1);
1055            }
1056        }
1057
1058        mDrainAudioQueuePending = false;
1059
1060        if (offloadingAudio()) {
1061            mAudioSink->pause();
1062            mAudioSink->flush();
1063            mAudioSink->start();
1064        }
1065    } else {
1066        flushQueue(&mVideoQueue);
1067
1068        mDrainVideoQueuePending = false;
1069        ++mVideoQueueGeneration;
1070
1071        if (mVideoScheduler != NULL) {
1072            mVideoScheduler->restart();
1073        }
1074
1075        prepareForMediaRenderingStart();
1076    }
1077
1078    mVideoSampleReceived = false;
1079    notifyFlushComplete(audio);
1080}
1081
1082void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
1083    while (!queue->empty()) {
1084        QueueEntry *entry = &*queue->begin();
1085
1086        if (entry->mBuffer != NULL) {
1087            entry->mNotifyConsumed->post();
1088        }
1089
1090        queue->erase(queue->begin());
1091        entry = NULL;
1092    }
1093}
1094
1095void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
1096    sp<AMessage> notify = mNotify->dup();
1097    notify->setInt32("what", kWhatFlushComplete);
1098    notify->setInt32("audio", static_cast<int32_t>(audio));
1099    notify->post();
1100}
1101
1102bool NuPlayer::Renderer::dropBufferWhileFlushing(
1103        bool audio, const sp<AMessage> &msg) {
1104    bool flushing = false;
1105
1106    {
1107        Mutex::Autolock autoLock(mFlushLock);
1108        if (audio) {
1109            flushing = mFlushingAudio;
1110        } else {
1111            flushing = mFlushingVideo;
1112        }
1113    }
1114
1115    if (!flushing) {
1116        return false;
1117    }
1118
1119    sp<AMessage> notifyConsumed;
1120    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
1121        notifyConsumed->post();
1122    }
1123
1124    return true;
1125}
1126
1127void NuPlayer::Renderer::onAudioSinkChanged() {
1128    if (offloadingAudio()) {
1129        return;
1130    }
1131    CHECK(!mDrainAudioQueuePending);
1132    mNumFramesWritten = 0;
1133    mAnchorNumFramesWritten = -1;
1134    uint32_t written;
1135    if (mAudioSink->getFramesWritten(&written) == OK) {
1136        mNumFramesWritten = written;
1137    }
1138}
1139
1140void NuPlayer::Renderer::onDisableOffloadAudio() {
1141    Mutex::Autolock autoLock(mLock);
1142    mFlags &= ~FLAG_OFFLOAD_AUDIO;
1143    ++mAudioQueueGeneration;
1144}
1145
1146void NuPlayer::Renderer::onEnableOffloadAudio() {
1147    Mutex::Autolock autoLock(mLock);
1148    mFlags |= FLAG_OFFLOAD_AUDIO;
1149    ++mAudioQueueGeneration;
1150}
1151
1152void NuPlayer::Renderer::onPause() {
1153    if (mPaused) {
1154        ALOGW("Renderer::onPause() called while already paused!");
1155        return;
1156    }
1157    {
1158        Mutex::Autolock autoLock(mLock);
1159        ++mAudioQueueGeneration;
1160        ++mVideoQueueGeneration;
1161        prepareForMediaRenderingStart();
1162        mPaused = true;
1163        setPauseStartedTimeRealUs(ALooper::GetNowUs());
1164    }
1165
1166    mDrainAudioQueuePending = false;
1167    mDrainVideoQueuePending = false;
1168
1169    if (mHasAudio) {
1170        mAudioSink->pause();
1171        startAudioOffloadPauseTimeout();
1172    }
1173
1174    ALOGV("now paused audio queue has %d entries, video has %d entries",
1175          mAudioQueue.size(), mVideoQueue.size());
1176}
1177
1178void NuPlayer::Renderer::onResume() {
1179    if (!mPaused) {
1180        return;
1181    }
1182
1183    if (mHasAudio) {
1184        cancelAudioOffloadPauseTimeout();
1185        mAudioSink->start();
1186    }
1187
1188    Mutex::Autolock autoLock(mLock);
1189    mPaused = false;
1190    if (mPauseStartedTimeRealUs != -1) {
1191        int64_t newAnchorRealUs =
1192            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
1193        setAnchorTime(
1194                mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
1195    }
1196
1197    if (!mAudioQueue.empty()) {
1198        postDrainAudioQueue_l();
1199    }
1200
1201    if (!mVideoQueue.empty()) {
1202        postDrainVideoQueue();
1203    }
1204}
1205
1206void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
1207    if (mVideoScheduler == NULL) {
1208        mVideoScheduler = new VideoFrameScheduler();
1209    }
1210    mVideoScheduler->init(fps);
1211}
1212
1213// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
1214// as it acquires locks and may query the audio driver.
1215//
1216// Some calls could conceivably retrieve extrapolated data instead of
1217// accessing getTimestamp() or getPosition() every time a data buffer with
1218// a media time is received.
1219//
1220int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
1221    uint32_t numFramesPlayed;
1222    int64_t numFramesPlayedAt;
1223    AudioTimestamp ts;
1224    static const int64_t kStaleTimestamp100ms = 100000;
1225
1226    status_t res = mAudioSink->getTimestamp(ts);
1227    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
1228        numFramesPlayed = ts.mPosition;
1229        numFramesPlayedAt =
1230            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
1231        const int64_t timestampAge = nowUs - numFramesPlayedAt;
1232        if (timestampAge > kStaleTimestamp100ms) {
1233            // This is an audio FIXME.
1234            // getTimestamp returns a timestamp which may come from audio mixing threads.
1235            // After pausing, the MixerThread may go idle, thus the mTime estimate may
1236            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
1237            // the max latency should be about 25ms with an average around 12ms (to be verified).
1238            // For safety we use 100ms.
1239            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
1240                    (long long)nowUs, (long long)numFramesPlayedAt);
1241            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
1242        }
1243        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
1244    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
1245        numFramesPlayed = 0;
1246        numFramesPlayedAt = nowUs;
1247        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
1248        //        numFramesPlayed, (long long)numFramesPlayedAt);
1249    } else {                         // case 3: transitory at new track or audio fast tracks.
1250        res = mAudioSink->getPosition(&numFramesPlayed);
1251        CHECK_EQ(res, (status_t)OK);
1252        numFramesPlayedAt = nowUs;
1253        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
1254        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
1255    }
1256
1257    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
1258    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
1259    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
1260            + nowUs - numFramesPlayedAt;
1261    if (durationUs < 0) {
1262        // Occurs when numFramesPlayed position is very small and the following:
1263        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
1264        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
1265        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
1266        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
1267        //
1268        // Both of these are transitory conditions.
1269        ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
1270        durationUs = 0;
1271    }
1272    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
1273            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
1274    return durationUs;
1275}
1276
1277void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
1278    if (mAudioOffloadTornDown) {
1279        return;
1280    }
1281    mAudioOffloadTornDown = true;
1282
1283    int64_t currentPositionUs;
1284    if (getCurrentPosition(&currentPositionUs) != OK) {
1285        currentPositionUs = 0;
1286    }
1287
1288    mAudioSink->stop();
1289    mAudioSink->flush();
1290
1291    sp<AMessage> notify = mNotify->dup();
1292    notify->setInt32("what", kWhatAudioOffloadTearDown);
1293    notify->setInt64("positionUs", currentPositionUs);
1294    notify->setInt32("reason", reason);
1295    notify->post();
1296}
1297
1298void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
1299    if (offloadingAudio()) {
1300        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
1301        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
1302        msg->post(kOffloadPauseMaxUs);
1303    }
1304}
1305
1306void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
1307    if (offloadingAudio()) {
1308        ++mAudioOffloadPauseTimeoutGeneration;
1309    }
1310}
1311
1312bool NuPlayer::Renderer::onOpenAudioSink(
1313        const sp<AMessage> &format,
1314        bool offloadOnly,
1315        bool hasVideo,
1316        uint32_t flags) {
1317    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
1318            offloadOnly, offloadingAudio());
1319    bool audioSinkChanged = false;
1320
1321    int32_t numChannels;
1322    CHECK(format->findInt32("channel-count", &numChannels));
1323
1324    int32_t channelMask;
1325    if (!format->findInt32("channel-mask", &channelMask)) {
1326        // signal to the AudioSink to derive the mask from count.
1327        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
1328    }
1329
1330    int32_t sampleRate;
1331    CHECK(format->findInt32("sample-rate", &sampleRate));
1332
1333    if (offloadingAudio()) {
1334        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
1335        AString mime;
1336        CHECK(format->findString("mime", &mime));
1337        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
1338
1339        if (err != OK) {
1340            ALOGE("Couldn't map mime \"%s\" to a valid "
1341                    "audio_format", mime.c_str());
1342            onDisableOffloadAudio();
1343        } else {
1344            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
1345                    mime.c_str(), audioFormat);
1346
1347            int avgBitRate = -1;
1348            format->findInt32("bit-rate", &avgBitRate);
1349
1350            int32_t aacProfile = -1;
1351            if (audioFormat == AUDIO_FORMAT_AAC
1352                    && format->findInt32("aac-profile", &aacProfile)) {
1353                // Redefine AAC format as per aac profile
1354                mapAACProfileToAudioFormat(
1355                        audioFormat,
1356                        aacProfile);
1357            }
1358
1359            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
1360            offloadInfo.duration_us = -1;
1361            format->findInt64(
1362                    "durationUs", &offloadInfo.duration_us);
1363            offloadInfo.sample_rate = sampleRate;
1364            offloadInfo.channel_mask = channelMask;
1365            offloadInfo.format = audioFormat;
1366            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
1367            offloadInfo.bit_rate = avgBitRate;
1368            offloadInfo.has_video = hasVideo;
1369            offloadInfo.is_streaming = true;
1370
1371            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
1372                ALOGV("openAudioSink: no change in offload mode");
1373                // no change from previous configuration, everything ok.
1374                return offloadingAudio();
1375            }
1376            ALOGV("openAudioSink: try to open AudioSink in offload mode");
1377            uint32_t offloadFlags = flags;
1378            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
1379            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
1380            audioSinkChanged = true;
1381            mAudioSink->close();
1382            err = mAudioSink->open(
1383                    sampleRate,
1384                    numChannels,
1385                    (audio_channel_mask_t)channelMask,
1386                    audioFormat,
1387                    8 /* bufferCount */,
1388                    &NuPlayer::Renderer::AudioSinkCallback,
1389                    this,
1390                    (audio_output_flags_t)offloadFlags,
1391                    &offloadInfo);
1392
1393            if (err == OK) {
1394                // If the playback is offloaded to h/w, we pass
1395                // the HAL some metadata information.
1396                // We don't want to do this for PCM because it
1397                // will be going through the AudioFlinger mixer
1398                // before reaching the hardware.
1399                // TODO
1400                mCurrentOffloadInfo = offloadInfo;
1401                err = mAudioSink->start();
1402                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
1403            }
1404            if (err != OK) {
1405                // Clean up, fall back to non offload mode.
1406                mAudioSink->close();
1407                onDisableOffloadAudio();
1408                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
1409                ALOGV("openAudioSink: offload failed");
1410            }
1411        }
1412    }
1413    if (!offloadOnly && !offloadingAudio()) {
1414        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
1415        uint32_t pcmFlags = flags;
1416        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
1417        audioSinkChanged = true;
1418        mAudioSink->close();
1419        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
1420        CHECK_EQ(mAudioSink->open(
1421                    sampleRate,
1422                    numChannels,
1423                    (audio_channel_mask_t)channelMask,
1424                    AUDIO_FORMAT_PCM_16_BIT,
1425                    8 /* bufferCount */,
1426                    NULL,
1427                    NULL,
1428                    (audio_output_flags_t)pcmFlags),
1429                 (status_t)OK);
1430        mAudioSink->start();
1431    }
1432    if (audioSinkChanged) {
1433        onAudioSinkChanged();
1434    }
1435    if (offloadingAudio()) {
1436        mAudioOffloadTornDown = false;
1437    }
1438
1439    return offloadingAudio();
1440}
1441
1442void NuPlayer::Renderer::onCloseAudioSink() {
1443    mAudioSink->close();
1444    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
1445}
1446
1447}  // namespace android
1448
1449