// NuPlayerRenderer.cpp -- revision eecb7805bbbb712925d4372c505f8c7f5c4fb5ed
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "NuPlayerRenderer"
19#include <utils/Log.h>
20
21#include "NuPlayerRenderer.h"
22
23#include <cutils/properties.h>
24
25#include <media/stagefright/foundation/ABuffer.h>
26#include <media/stagefright/foundation/ADebug.h>
27#include <media/stagefright/foundation/AMessage.h>
28#include <media/stagefright/foundation/AUtils.h>
29#include <media/stagefright/MediaErrors.h>
30#include <media/stagefright/MetaData.h>
31#include <media/stagefright/Utils.h>
32
33#include <VideoFrameScheduler.h>
34
35#include <inttypes.h>
36
37namespace android {
38
// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
static const int64_t kOffloadPauseMaxUs = 60000000ll;

// static
// Minimum delay between position updates (100 ms).
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// When true, video frames are posted to render as close as possible to their
// exact display time (see postDrainVideoQueue()); controlled by the
// "persist.sys.media.avsync" system property read in readProperties().
static bool sFrameAccurateAVsync = false;
47
48static void readProperties() {
49    char value[PROPERTY_VALUE_MAX];
50    if (property_get("persist.sys.media.avsync", value, NULL)) {
51        sFrameAccurateAVsync =
52            !strcmp("1", value) || !strcasecmp("true", value);
53    }
54}
55
// Constructs a renderer bound to |sink| for audio output; events are reported
// through |notify|. |flags| may include FLAG_REAL_TIME (buffer timestamps are
// already real-time) among others. All clock/anchor state starts unset (-1).
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mVideoLateByUs(0ll),
      mHasAudio(false),
      mHasVideo(false),
      mPauseStartedTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER) {
    // Cache the frame-accurate AV-sync property into sFrameAccurateAVsync.
    readProperties();
}
88
89NuPlayer::Renderer::~Renderer() {
90    if (offloadingAudio()) {
91        mAudioSink->stop();
92        mAudioSink->flush();
93        mAudioSink->close();
94    }
95}
96
97void NuPlayer::Renderer::queueBuffer(
98        bool audio,
99        const sp<ABuffer> &buffer,
100        const sp<AMessage> &notifyConsumed) {
101    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
102    msg->setInt32("audio", static_cast<int32_t>(audio));
103    msg->setBuffer("buffer", buffer);
104    msg->setMessage("notifyConsumed", notifyConsumed);
105    msg->post();
106}
107
108void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
109    CHECK_NE(finalResult, (status_t)OK);
110
111    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
112    msg->setInt32("audio", static_cast<int32_t>(audio));
113    msg->setInt32("finalResult", finalResult);
114    msg->post();
115}
116
117void NuPlayer::Renderer::flush(bool audio) {
118    {
119        Mutex::Autolock autoLock(mFlushLock);
120        if (audio) {
121            if (mFlushingAudio) {
122                return;
123            }
124            mFlushingAudio = true;
125        } else {
126            if (mFlushingVideo) {
127                return;
128            }
129            mFlushingVideo = true;
130        }
131    }
132
133    sp<AMessage> msg = new AMessage(kWhatFlush, id());
134    msg->setInt32("audio", static_cast<int32_t>(audio));
135    msg->post();
136}
137
// Resets all clock anchors after a time discontinuity (e.g. seek) so the next
// queued buffer re-establishes the media clock, and ends any queue syncing.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    setAudioFirstAnchorTime(-1);
    setAnchorTime(-1, -1);
    setVideoLateByUs(0);
    mSyncQueues = false;
}
147
148void NuPlayer::Renderer::signalAudioSinkChanged() {
149    (new AMessage(kWhatAudioSinkChanged, id()))->post();
150}
151
152void NuPlayer::Renderer::signalDisableOffloadAudio() {
153    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
154}
155
156void NuPlayer::Renderer::pause() {
157    (new AMessage(kWhatPause, id()))->post();
158}
159
160void NuPlayer::Renderer::resume() {
161    (new AMessage(kWhatResume, id()))->post();
162}
163
164void NuPlayer::Renderer::setVideoFrameRate(float fps) {
165    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
166    msg->setFloat("frame-rate", fps);
167    msg->post();
168}
169
170status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
171    return getCurrentPosition(mediaUs, ALooper::GetNowUs());
172}
173
174status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs) {
175    Mutex::Autolock autoLock(mTimeLock);
176    if (!mHasAudio && !mHasVideo) {
177        return NO_INIT;
178    }
179
180    if (mAnchorTimeMediaUs < 0) {
181        return NO_INIT;
182    }
183    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
184
185    if (mPauseStartedTimeRealUs != -1) {
186        positionUs -= (nowUs - mPauseStartedTimeRealUs);
187    }
188
189    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
190        positionUs = mAudioFirstAnchorTimeMediaUs;
191    }
192
193    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
194    return OK;
195}
196
197void NuPlayer::Renderer::setHasMedia(bool audio) {
198    Mutex::Autolock autoLock(mTimeLock);
199    if (audio) {
200        mHasAudio = true;
201    } else {
202        mHasVideo = true;
203    }
204}
205
// Unconditionally (re)sets the first audio anchor; -1 means "unset".
void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mAudioFirstAnchorTimeMediaUs = mediaUs;
}
210
211void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
212    Mutex::Autolock autoLock(mTimeLock);
213    if (mAudioFirstAnchorTimeMediaUs == -1) {
214        mAudioFirstAnchorTimeMediaUs = mediaUs;
215    }
216}
217
218void NuPlayer::Renderer::setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume) {
219    Mutex::Autolock autoLock(mTimeLock);
220    mAnchorTimeMediaUs = mediaUs;
221    mAnchorTimeRealUs = realUs;
222    if (resume) {
223        mPauseStartedTimeRealUs = -1;
224    }
225}
226
// Records how late (positive) or early (negative) the last video frame was.
void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mVideoLateByUs = lateUs;
}
231
// Thread-safe accessor for the last recorded video lateness.
int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mTimeLock);
    return mVideoLateByUs;
}
236
// Marks the real time at which playback paused (-1 clears the marker).
void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mPauseStartedTimeRealUs = realUs;
}
241
242bool NuPlayer::Renderer::openAudioSink(
243        const sp<AMessage> &format,
244        bool offloadOnly,
245        bool hasVideo,
246        uint32_t flags) {
247    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
248    msg->setMessage("format", format);
249    msg->setInt32("offload-only", offloadOnly);
250    msg->setInt32("has-video", hasVideo);
251    msg->setInt32("flags", flags);
252
253    sp<AMessage> response;
254    msg->postAndAwaitResponse(&response);
255
256    int32_t offload;
257    CHECK(response->findInt32("offload", &offload));
258    return (offload != 0);
259}
260
261void NuPlayer::Renderer::closeAudioSink() {
262    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());
263
264    sp<AMessage> response;
265    msg->postAndAwaitResponse(&response);
266}
267
// Looper-thread dispatch for all renderer messages. Stale drain/timeout
// messages are discarded by comparing their "generation" against the current
// queue generation (bumped on every flush).
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            // Synchronous request from openAudioSink(); reply carries whether
            // the sink was opened in offload mode.
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

            sp<AMessage> response = new AMessage;
            response->setInt32("offload", offload);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            // Synchronous request from closeAudioSink().
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            // Posted from fillAudioBuffer() once EOS is reached in offload mode.
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                // Stale message from before a flush.
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                // More data remains; reschedule based on how much audio the
                // sink still has buffered.
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                // Stale message from before a flush.
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Schedule rendering of the next frame, if any.
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                // A resume happened since this timeout was scheduled.
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
437
438void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
439    if (mDrainAudioQueuePending || mSyncQueues || mPaused
440            || offloadingAudio()) {
441        return;
442    }
443
444    if (mAudioQueue.empty()) {
445        return;
446    }
447
448    mDrainAudioQueuePending = true;
449    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
450    msg->setInt32("generation", mAudioQueueGeneration);
451    msg->post(delayUs);
452}
453
// Arms notifyIfMediaRenderingStarted(): snapshots the current queue
// generations so the "rendering started" notification fires once both streams
// have rendered in this generation.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}
458
459void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
460    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
461        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
462        mVideoRenderingStartGeneration = -1;
463        mAudioRenderingStartGeneration = -1;
464
465        sp<AMessage> notify = mNotify->dup();
466        notify->setInt32("what", kWhatMediaRenderingStart);
467        notify->post();
468    }
469}
470
471// static
472size_t NuPlayer::Renderer::AudioSinkCallback(
473        MediaPlayerBase::AudioSink * /* audioSink */,
474        void *buffer,
475        size_t size,
476        void *cookie,
477        MediaPlayerBase::AudioSink::cb_event_t event) {
478    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
479
480    switch (event) {
481        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
482        {
483            return me->fillAudioBuffer(buffer, size);
484            break;
485        }
486
487        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
488        {
489            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
490            break;
491        }
492
493        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
494        {
495            me->notifyAudioOffloadTearDown();
496            break;
497        }
498    }
499
500    return 0;
501}
502
// Offload-mode pull path: called (via AudioSinkCallback) when the sink wants
// up to |size| bytes. Copies data from mAudioQueue into |buffer| and returns
// the number of bytes copied. Returns 0 (no data) when not offloading or
// while paused.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Only the first, un-started entry of this callback updates the media
        // clock; partially consumed entries already did so.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        // Copy as much of this entry as fits in the remaining space.
        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully consumed: return the buffer to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        // Stop the sink from the looper thread (we are on the sink's
        // callback thread here).
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}
558
// Non-offload push path: writes as much queued audio as the sink can accept.
// Returns true if more data remains queued (caller reschedules the drain),
// false on EOS or when the sink position is unavailable.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    // Free space in the sink = total capacity minus frames written but not
    // yet played.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            // Delay the EOS notification until the already-written audio has
            // actually played out, if the sink requires it.
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        // A fresh entry (offset 0) carries a new media timestamp; update the
        // clock anchor.
        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully written: return the buffer to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    return !mAudioQueue.empty();
}
651
// Duration (us) of audio written to the sink that has not yet played out as
// of |nowUs|. NOTE(review): msecsPerFrame() presumably returns a floating
// point value, so the multiplication order below affects rounding -- confirm
// before rearranging.
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}
657
658int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
659    int64_t currentPositionUs;
660    if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
661        // If failed to get current position, e.g. due to audio clock is not ready, then just
662        // play out video immediately without delay.
663        return nowUs;
664    }
665    return (mediaTimeUs - currentPositionUs) + nowUs;
666}
667
// Updates the clock anchor from a newly rendered audio timestamp. The real
// time half of the anchor is "now" plus however long the already-written
// audio will still take to play out.
void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
    int64_t nowUs = ALooper::GetNowUs();
    setAnchorTime(mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs));
}
678
// Schedules a kWhatDrainVideoQueue for the head of the video queue, timed via
// the VideoFrameScheduler so the frame is handed off shortly before its vsync
// slot. EOS entries are posted immediately.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        // Buffer timestamps are already real-time.
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            // Video-only (or first-ever) frame: anchor the clock here and
            // render immediately.
            setAnchorTime(mediaTimeUs, nowUs);
            realTimeUs = nowUs;
        } else {
            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
        }
    }

    // Snap the render time onto a vsync slot.
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    // FIXME currently this increases power consumption, so unless frame-accurate
    // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
    // NOTE(review): 2 vsyncs >> 4 is 0.125 vsyncs, not the 0.63 mentioned
    // above -- confirm which factor is actually intended.
    if (!sFrameAccurateAVsync) {
        twoVsyncsUs >>= 4;
    }
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}
737
738void NuPlayer::Renderer::onDrainVideoQueue() {
739    if (mVideoQueue.empty()) {
740        return;
741    }
742
743    QueueEntry *entry = &*mVideoQueue.begin();
744
745    if (entry->mBuffer == NULL) {
746        // EOS
747
748        notifyEOS(false /* audio */, entry->mFinalResult);
749
750        mVideoQueue.erase(mVideoQueue.begin());
751        entry = NULL;
752
753        setVideoLateByUs(0);
754        return;
755    }
756
757    int64_t nowUs = -1;
758    int64_t realTimeUs;
759    if (mFlags & FLAG_REAL_TIME) {
760        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
761    } else {
762        int64_t mediaTimeUs;
763        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
764
765        nowUs = ALooper::GetNowUs();
766        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
767    }
768
769    bool tooLate = false;
770
771    if (!mPaused) {
772        if (nowUs == -1) {
773            nowUs = ALooper::GetNowUs();
774        }
775        setVideoLateByUs(nowUs - realTimeUs);
776        tooLate = (mVideoLateByUs > 40000);
777
778        if (tooLate) {
779            ALOGV("video late by %lld us (%.2f secs)",
780                 mVideoLateByUs, mVideoLateByUs / 1E6);
781        } else {
782            ALOGV("rendering video at media time %.2f secs",
783                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
784                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
785        }
786    } else {
787        setVideoLateByUs(0);
788        if (!mVideoSampleReceived && !mHasAudio) {
789            // This will ensure that the first frame after a flush won't be used as anchor
790            // when renderer is in paused state, because resume can happen any time after seek.
791            setAnchorTime(-1, -1);
792        }
793    }
794
795    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
796    entry->mNotifyConsumed->setInt32("render", !tooLate);
797    entry->mNotifyConsumed->post();
798    mVideoQueue.erase(mVideoQueue.begin());
799    entry = NULL;
800
801    mVideoSampleReceived = true;
802
803    if (!mPaused) {
804        if (!mVideoRenderingStarted) {
805            mVideoRenderingStarted = true;
806            notifyVideoRenderingStart();
807        }
808        notifyIfMediaRenderingStarted();
809    }
810}
811
812void NuPlayer::Renderer::notifyVideoRenderingStart() {
813    sp<AMessage> notify = mNotify->dup();
814    notify->setInt32("what", kWhatVideoRenderingStart);
815    notify->post();
816}
817
818void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
819    sp<AMessage> notify = mNotify->dup();
820    notify->setInt32("what", kWhatEOS);
821    notify->setInt32("audio", static_cast<int32_t>(audio));
822    notify->setInt32("finalResult", finalResult);
823    notify->post(delayUs);
824}
825
826void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
827    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
828}
829
// Looper-thread handler for queueBuffer(): enqueues the buffer, kicks the
// matching drain path, and -- while mSyncQueues is set -- aligns the initial
// audio/video timestamps by dropping leading audio that starts >0.1s before
// the first video frame.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    setHasMedia(audio);

    if (mHasVideo) {
        // Lazily create the vsync-based frame scheduler on first video use.
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        // Audio queue is also touched from the offload callback thread, so
        // take mLock here; note the lock is released before re-acquiring below.
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}
904
// Ends the initial A/V queue sync phase and kicks both drain paths for any
// data that was queued while syncing. Caller must hold mLock.
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
920
// Looper-thread handler for queueEOS(): appends an EOS marker (an entry with
// a NULL buffer) to the appropriate queue; mFinalResult carries the
// terminating status (never OK -- enforced in queueEOS()).
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        // Queueing EOS into an empty queue while syncing would deadlock the
        // sync; end it first.
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            // NOTE(review): unlike the audio branch, mSyncQueues is read and
            // mVideoQueue modified outside mLock here -- presumably safe
            // because video queueing only happens on the looper thread, but
            // worth confirming the asymmetry is intentional.
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}
952
// Looper-thread handler for flush(): clears the flushing flag, empties the
// stream's queue (returning buffers to the decoder), bumps the queue
// generation so in-flight drain messages become stale, and notifies the
// listener on completion.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
         setPauseStartedTimeRealUs(-1);
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            // Invalidate any pending kWhatDrainAudioQueue messages.
            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                setAudioFirstAnchorTime(-1);
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            // Flush the DSP-side stream as well.
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        // Invalidate any pending kWhatDrainVideoQueue messages.
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}
1017
1018void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
1019    while (!queue->empty()) {
1020        QueueEntry *entry = &*queue->begin();
1021
1022        if (entry->mBuffer != NULL) {
1023            entry->mNotifyConsumed->post();
1024        }
1025
1026        queue->erase(queue->begin());
1027        entry = NULL;
1028    }
1029}
1030
1031void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
1032    sp<AMessage> notify = mNotify->dup();
1033    notify->setInt32("what", kWhatFlushComplete);
1034    notify->setInt32("audio", static_cast<int32_t>(audio));
1035    notify->post();
1036}
1037
1038bool NuPlayer::Renderer::dropBufferWhileFlushing(
1039        bool audio, const sp<AMessage> &msg) {
1040    bool flushing = false;
1041
1042    {
1043        Mutex::Autolock autoLock(mFlushLock);
1044        if (audio) {
1045            flushing = mFlushingAudio;
1046        } else {
1047            flushing = mFlushingVideo;
1048        }
1049    }
1050
1051    if (!flushing) {
1052        return false;
1053    }
1054
1055    sp<AMessage> notifyConsumed;
1056    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
1057        notifyConsumed->post();
1058    }
1059
1060    return true;
1061}
1062
1063void NuPlayer::Renderer::onAudioSinkChanged() {
1064    if (offloadingAudio()) {
1065        return;
1066    }
1067    CHECK(!mDrainAudioQueuePending);
1068    mNumFramesWritten = 0;
1069    uint32_t written;
1070    if (mAudioSink->getFramesWritten(&written) == OK) {
1071        mNumFramesWritten = written;
1072    }
1073}
1074
1075void NuPlayer::Renderer::onDisableOffloadAudio() {
1076    Mutex::Autolock autoLock(mLock);
1077    mFlags &= ~FLAG_OFFLOAD_AUDIO;
1078    ++mAudioQueueGeneration;
1079}
1080
1081void NuPlayer::Renderer::onPause() {
1082    if (mPaused) {
1083        ALOGW("Renderer::onPause() called while already paused!");
1084        return;
1085    }
1086    {
1087        Mutex::Autolock autoLock(mLock);
1088        ++mAudioQueueGeneration;
1089        ++mVideoQueueGeneration;
1090        prepareForMediaRenderingStart();
1091        mPaused = true;
1092        setPauseStartedTimeRealUs(ALooper::GetNowUs());
1093    }
1094
1095    mDrainAudioQueuePending = false;
1096    mDrainVideoQueuePending = false;
1097
1098    if (mHasAudio) {
1099        mAudioSink->pause();
1100        startAudioOffloadPauseTimeout();
1101    }
1102
1103    ALOGV("now paused audio queue has %d entries, video has %d entries",
1104          mAudioQueue.size(), mVideoQueue.size());
1105}
1106
// Resumes playback after onPause(): restarts the audio sink first so audio
// can flow again, then shifts the real-time anchor forward by the length of
// the pause (media time did not progress) and re-posts drains for any
// buffers still queued.
void NuPlayer::Renderer::onResume() {
    // Re-read the persist.sys.media.avsync property in case it changed.
    readProperties();

    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        // Cancel the pending offload teardown timer before restarting.
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;
    if (mPauseStartedTimeRealUs != -1) {
        // Advance the anchor's real-time component by the pause duration
        // so position extrapolation ignores the paused interval.
        int64_t newAnchorRealUs =
            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
        setAnchorTime(mAnchorTimeMediaUs, newAnchorRealUs, true /* resume */);
    }

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
1135
1136void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
1137    if (mVideoScheduler == NULL) {
1138        mVideoScheduler = new VideoFrameScheduler();
1139    }
1140    mVideoScheduler->init(fps);
1141}
1142
// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
// Returns how much audio (in microseconds) has actually been played out,
// derived from the sink's frame position plus elapsed wall time since that
// position was sampled.
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;   // real time the frame count was sampled
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        // No frames have played yet; treat "now" as the sample time.
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //        numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        // getPosition() reports the server-side position; add half the sink
        // latency as a rough estimate of when those frames were heard.
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}
1206
1207void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
1208    if (mAudioOffloadTornDown) {
1209        return;
1210    }
1211    mAudioOffloadTornDown = true;
1212
1213    int64_t currentPositionUs;
1214    if (getCurrentPosition(&currentPositionUs) != OK) {
1215        currentPositionUs = 0;
1216    }
1217
1218    mAudioSink->stop();
1219    mAudioSink->flush();
1220
1221    sp<AMessage> notify = mNotify->dup();
1222    notify->setInt32("what", kWhatAudioOffloadTearDown);
1223    notify->setInt64("positionUs", currentPositionUs);
1224    notify->setInt32("reason", reason);
1225    notify->post();
1226}
1227
1228void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
1229    if (offloadingAudio()) {
1230        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
1231        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
1232        msg->post(kOffloadPauseMaxUs);
1233    }
1234}
1235
1236void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
1237    if (offloadingAudio()) {
1238        ++mAudioOffloadPauseTimeoutGeneration;
1239    }
1240}
1241
// Opens (or re-opens) the AudioSink for the given audio format.
// When offloadingAudio() is set, first tries to open the sink in compressed
// offload mode; on any failure it disables offload and -- unless offloadOnly
// is set -- falls back to a 16-bit PCM sink. Returns whether the sink ended
// up in offload mode.
bool NuPlayer::Renderer::onOpenAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
            offloadOnly, offloadingAudio());
    bool audioSinkChanged = false;

    int32_t numChannels;
    CHECK(format->findInt32("channel-count", &numChannels));

    int32_t channelMask;
    if (!format->findInt32("channel-mask", &channelMask)) {
        // signal to the AudioSink to derive the mask from count.
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    int32_t sampleRate;
    CHECK(format->findInt32("sample-rate", &sampleRate));

    if (offloadingAudio()) {
        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
        AString mime;
        CHECK(format->findString("mime", &mime));
        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());

        if (err != OK) {
            // Unknown mime type: offload impossible, fall through to the
            // PCM path below.
            ALOGE("Couldn't map mime \"%s\" to a valid "
                    "audio_format", mime.c_str());
            onDisableOffloadAudio();
        } else {
            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                    mime.c_str(), audioFormat);

            // Optional metadata; -1 means "unknown" to the HAL.
            int avgBitRate = -1;
            format->findInt32("bit-rate", &avgBitRate);

            int32_t aacProfile = -1;
            if (audioFormat == AUDIO_FORMAT_AAC
                    && format->findInt32("aac-profile", &aacProfile)) {
                // Redefine AAC format as per aac profile
                mapAACProfileToAudioFormat(
                        audioFormat,
                        aacProfile);
            }

            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.duration_us = -1;
            format->findInt64(
                    "durationUs", &offloadInfo.duration_us);
            offloadInfo.sample_rate = sampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = hasVideo;
            offloadInfo.is_streaming = true;

            // NOTE(review): memcmp also compares struct padding; this relies
            // on both structs being fully initialized via
            // AUDIO_INFO_INITIALIZER -- confirm it zero-fills padding.
            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                ALOGV("openAudioSink: no change in offload mode");
                // no change from previous configuration, everything ok.
                return offloadingAudio();
            }
            ALOGV("openAudioSink: try to open AudioSink in offload mode");
            flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
            flags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
            audioSinkChanged = true;
            mAudioSink->close();
            err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    audioFormat,
                    8 /* bufferCount */,
                    &NuPlayer::Renderer::AudioSinkCallback,
                    this,
                    (audio_output_flags_t)flags,
                    &offloadInfo);

            if (err == OK) {
                // If the playback is offloaded to h/w, we pass
                // the HAL some metadata information.
                // We don't want to do this for PCM because it
                // will be going through the AudioFlinger mixer
                // before reaching the hardware.
                // TODO
                mCurrentOffloadInfo = offloadInfo;
                err = mAudioSink->start();
                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
            }
            if (err != OK) {
                // Clean up, fall back to non offload mode.
                mAudioSink->close();
                onDisableOffloadAudio();
                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                ALOGV("openAudioSink: offload failed");
            }
        }
    }
    if (!offloadOnly && !offloadingAudio()) {
        // PCM fallback path (also taken when offload was never requested).
        flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
        ALOGV("openAudioSink: open AudioSink in NON-offload mode");

        audioSinkChanged = true;
        mAudioSink->close();
        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
        CHECK_EQ(mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    AUDIO_FORMAT_PCM_16_BIT,
                    8 /* bufferCount */,
                    NULL,
                    NULL,
                    (audio_output_flags_t)flags),
                 (status_t)OK);
        mAudioSink->start();
    }
    if (audioSinkChanged) {
        // Re-sync the written-frame counter with the freshly opened sink.
        onAudioSinkChanged();
    }

    return offloadingAudio();
}
1367
// Closes the AudioSink and forgets the last offload configuration so a
// subsequent onOpenAudioSink() re-opens the sink from scratch.
void NuPlayer::Renderer::onCloseAudioSink() {
    mAudioSink->close();
    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
}
1372
1373}  // namespace android
1374
1375