/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include "SoftwareRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

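// Position updates (kWhatPosition) are throttled to at most one per this
// interval; see notifyPosition().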
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mSoftRenderer(NULL),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
    delete mSoftRenderer;
}

void NuPlayer::Renderer::setSoftRenderer(SoftwareRenderer *softRenderer) {
    delete mSoftRenderer;
    mSoftRenderer = softRenderer;
}

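// The public entry points below (queueBuffer, queueEOS, flush, pause, resume)
// are asynchronous: each simply posts a message to the renderer's looper, and
// the actual work happens in the corresponding handler on that thread.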
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

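// Called once both queues have been flushed: resets the A/V anchor and, if
// both an audio and a video stream are present, re-arms start-up queue
// synchronization.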
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    CHECK(mAudioQueue.empty());
    CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = mHasAudio && mHasVideo;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

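// Schedules a kWhatDrainAudioQueue message after delayUs unless a drain is
// already pending, the queues are still being synchronized at start-up,
// playback is paused, or there is nothing queued.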
void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

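// Writes as much queued audio into the sink as it can currently accept,
// updating the media-time/real-time anchor used for A/V sync along the way.
// Signals EOS when the EOS entry is reached. Returns true if the audio queue
// still has data left to drain after this pass.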
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

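// Schedules a kWhatDrainVideoQueue message, delayed until the buffer at the
// head of the video queue is due for display (or immediately for EOS or when
// no A/V anchor has been established yet).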
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

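// Displays (or drops, if it is more than 40 ms late) the buffer at the head
// of the video queue, then returns it to the decoder via its notifyConsumed
// message with a "render" flag indicating what was decided.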
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t mediaTimeUs = -1;
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs",
              (mFlags & FLAG_REAL_TIME ? realTimeUs : mediaTimeUs) / 1E6);
        if (mSoftRenderer != NULL) {
            mSoftRenderer->render(entry->mBuffer->data(), entry->mBuffer->size(), NULL);
        }
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

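// Appends an incoming buffer to the corresponding queue and kicks off
// draining. While the queues are still being synchronized at start-up,
// leading audio that starts more than 0.1 secs before the first video frame
// is dropped so that both streams begin roughly in sync.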
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}

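// Leaves start-up synchronization mode and starts draining whichever queues
// already have data.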
void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            syncQueuesDone();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}

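// Returns every buffer still in the queue to the decoder via its
// notifyConsumed message (EOS entries carry no buffer) and empties the queue.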
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

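// While a flush is pending for this stream, incoming buffers are returned to
// the decoder right away instead of being queued. Returns true if the message
// was consumed here.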
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

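// Resynchronizes mNumFramesWritten with the (new) audio sink so that
// pending-playout calculations stay correct.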
void NuPlayer::Renderer::onAudioSinkChanged() {
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

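// Sends a kWhatPosition update (current media position and video lateness) to
// the upper layer, throttled to at most one update per
// kMinPositionUpdateDelayUs.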
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %zu entries, video has %zu entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android