NuPlayerRenderer.cpp revision 0852917279f79a94907e9906d0533ae409a30f6a
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <cutils/properties.h> 24 25#include <media/stagefright/foundation/ABuffer.h> 26#include <media/stagefright/foundation/ADebug.h> 27#include <media/stagefright/foundation/AMessage.h> 28#include <media/stagefright/MediaErrors.h> 29#include <media/stagefright/MetaData.h> 30 31#include <VideoFrameScheduler.h> 32 33#include <inttypes.h> 34 35namespace android { 36 37// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink 38// is closed to allow the audio DSP to power down. 
static const int64_t kOffloadPauseMaxUs = 60000000ll;

// static
// Minimum interval between position updates posted to the upper layer.
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// Whether frame-accurate AV sync was requested via system property; read
// once at construction and re-read on resume.
static bool sFrameAccurateAVsync = false;

// Reads "persist.sys.media.avsync" and updates sFrameAccurateAVsync.
// Accepts "1" or (case-insensitively) "true" as enabled.
static void readProperties() {
    char value[PROPERTY_VALUE_MAX];
    if (property_get("persist.sys.media.avsync", value, NULL)) {
        sFrameAccurateAVsync =
            !strcmp("1", value) || !strcasecmp("true", value);
    }
}

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false) {
    readProperties();
}

NuPlayer::Renderer::~Renderer() {
    // When offloading, the AudioSink was kept open by the renderer; shut it
    // down fully so the DSP releases its resources.
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

// Enqueues a decoded buffer for rendering. Runs on the caller's thread and
// only posts a message; the actual queueing happens on the looper thread in
// onQueueBuffer(). notifyConsumed is posted back when the buffer is consumed.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Enqueues an end-of-stream marker for the given stream. finalResult must be
// an actual error/EOS status, never OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Requests a flush of the audio or video queue. The mFlushing* flag makes
// duplicate flush requests no-ops until onFlush() clears it.
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets all A/V clock anchors after a time discontinuity (e.g. seek) and
// stops syncing the two queues.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mFirstAnchorTimeMediaUs = -1;
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Forwards the (estimated) video frame rate to the VideoFrameScheduler via
// the looper thread.
void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
    msg->setFloat("frame-rate", fps);
    msg->post();
}

// Looper-thread dispatch. Messages carrying a "generation" field are dropped
// if the corresponding queue generation has advanced (i.e. a flush/pause
// invalidated them).
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                // There is still data queued; reschedule the next drain.
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules a kWhatDrainAudioQueue message after delayUs, unless a drain is
// already pending, queues are still being synced, we're paused, or audio is
// offloaded (offload mode is driven by the sink callback instead).
// "_l" suffix: caller holds mLock in offload mode; see the comment at the
// kWhatDrainAudioQueue call site for why the non-offload path may skip it.
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

// Arms the "media rendering started" detection: the notification fires once
// each queue's generation matches the snapshot taken here.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Posts kWhatMediaRenderingStart exactly once per prepare: the -1 sentinels
// prevent re-notification until prepareForMediaRenderingStart() re-arms.
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
// AudioSink callback used in offload mode; "cookie" is the Renderer. Runs on
// the AudioSink's callback thread, not the renderer looper.
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

// Copies queued audio into the sink's callback buffer (offload mode only).
// Returns the number of bytes copied; 0 while paused or not offloading.
// Runs on the AudioSink callback thread, hence the mLock.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Only the first untouched (offset 0) entry updates the clock anchor,
        // using the amount of audio the sink reports as actually played out.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }

            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeMediaUs =
                mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
            mAnchorTimeRealUs = nowUs;

            notifyPosition();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully consumed; tell the decoder and drop it.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        // Can't stop the sink from its own callback thread; bounce through
        // the looper.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Writes as much queued audio as the sink can accept (non-offload mode).
// Returns true if the queue still has entries (caller reschedules a drain),
// false on EOS, sink error, or empty queue.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                // Delay the EOS notification until the already-written audio
                // has actually played out.
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            // First write of this entry: re-anchor the media clock. The real
            // time anchor is "now + time until this sample is audible".
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }
            mAnchorTimeMediaUs = mediaTimeUs;

            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);

            notifyPosition();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            //  1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            //  2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    return !mAudioQueue.empty();
}

// Duration of audio that has been written to the sink but not yet played out,
// in microseconds (written duration minus played-out duration).
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

// Schedules the next kWhatDrainVideoQueue so that it fires shortly before the
// head-of-queue frame's vsync-aligned render time.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        // Video-only streams drive the clock themselves; with audio present,
        // the audio path owns the anchors.
        if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) {
            mFirstAnchorTimeMediaUs = mediaTimeUs;
        }
        if (mAnchorTimeMediaUs < 0) {
            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = nowUs;
                notifyPosition();
            }
            realTimeUs = nowUs;
        } else {
            realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
        }
    }

    // Snap the render time onto the display's vsync grid (scheduler works in
    // nanoseconds).
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    // FIXME currently this increases power consumption, so unless frame-accurate
    // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
    if (!sFrameAccurateAVsync) {
        // NOTE(review): >>= 4 leaves 2/16 = 0.125 vsyncs of lead time, not
        // the 0.63 vsyncs the comment above mentions — verify intended scale.
        twoVsyncsUs >>= 4;
    }
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}

// Releases the head video frame to the consumer, marking it "render" unless
// it is more than 40ms late. Also handles video EOS.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    bool tooLate = false;

    if (!mPaused) {
        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %lld us (%.2f secs)",
                 mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
        }
    } else {
        // While paused, render the first frame regardless of lateness so a
        // picture appears; see mVideoSampleReceived below.
        mVideoLateByUs = 0ll;
    }

    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

// Posts kWhatEOS upstream, optionally delayed (used so audio EOS is reported
// only after written audio has played out).
void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

// Looper-thread handler for queueBuffer(). Queues the entry and, while
// mSyncQueues is set, drops leading audio so the first audio and video
// timestamps line up within 0.1s before draining starts.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        // mLock guards the audio queue against the offload callback thread.
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

// Ends the queue-sync phase and kicks off draining of whichever queues have
// data. Caller must hold mLock ("_l" suffix).
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-thread handler for queueEOS(): appends an EOS entry (mBuffer == NULL)
// to the appropriate queue.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        // NOTE(review): unlike the audio branch above, mVideoQueue/mSyncQueues
        // are read before mLock is taken, and the push_back happens outside
        // the lock — confirm the video queue is only touched on this looper
        // thread, or align the locking with the audio branch.
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper-thread handler for flush(): clears the requested queue, bumps its
// generation (invalidating pending drain messages) and, for offloaded audio,
// cycles the sink so the DSP discards its buffered data.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                mFirstAnchorTimeMediaUs = -1;
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}

// Empties a queue, posting each entry's consumed-notification so the decoder
// gets its buffers back (EOS entries have no buffer and nothing to notify).
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Returns true (and returns the buffer to the decoder) if the stream is mid-
// flush, so stale queue/EOS messages are discarded instead of enqueued.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// Re-bases mNumFramesWritten after the AudioSink was reopened. Not applicable
// in offload mode.
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

// Drops out of offload mode; bumping the audio generation discards any
// in-flight audio drain messages.
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

// Extrapolates the current playback position from the anchors and posts
// kWhatPosition upstream, rate-limited to one update per
// kMinPositionUpdateDelayUs.
void NuPlayer::Renderer::notifyPosition() {
    // notifyPosition() must be called only after setting mAnchorTimeRealUs
    // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position.
    //CHECK_GE(mAnchorTimeRealUs, 0);
    //CHECK_GE(mAnchorTimeMediaUs, 0);
    //CHECK(!mPaused || !mHasAudio);  // video-only does display in paused mode.

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)"
    //        " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)",
    //        (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs,
    //        (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs);

    // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(),
    // positionUs may be less than the first media time.  This is avoided
    // here to prevent potential retrograde motion of the position bar
    // when starting up after a seek.
    if (positionUs < mFirstAnchorTimeMediaUs) {
        positionUs = mFirstAnchorTimeMediaUs;
    }
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

// Pauses rendering: bumps both generations so pending drains are dropped,
// pauses the sink, and (when offloading) arms the DSP power-down timeout.
void NuPlayer::Renderer::onPause() {
    if (mPaused) {
        ALOGW("Renderer::onPause() called while already paused!");
        return;
    }
    {
        Mutex::Autolock autoLock(mLock);
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
        mPaused = true;
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
        startAudioOffloadPauseTimeout();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());
}

// Resumes rendering: cancels the offload pause timeout, restarts the sink and
// reschedules drains for any queued data.
void NuPlayer::Renderer::onResume() {
    readProperties();

    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Creates the scheduler on demand and (re)initializes it with the new rate.
void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
    if (mVideoScheduler == NULL) {
        mVideoScheduler = new VideoFrameScheduler();
    }
    mVideoScheduler->init(fps);
}

// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls are not needed since notifyPosition() doesn't always deliver a message.
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
// Returns how much audio (in microseconds) has actually been played out by
// the sink as of nowUs, using the best of three sources: a driver timestamp
// (with staleness compensation), a WOULD_BLOCK transitory state, or a plain
// position query adjusted by half the sink latency.
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGW("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //        numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test.
    int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGW("getPlayedOutAudioDurationUs: negative timestamp %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}

// Tears down offloaded audio exactly once (guarded by mAudioOffloadTornDown):
// stops/flushes the sink and notifies the player with the position at which
// playback should resume after falling back to non-offload output.
void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
    if (mAudioOffloadTornDown) {
        return;
    }
    mAudioOffloadTornDown = true;

    int64_t firstAudioTimeUs;
    {
        Mutex::Autolock autoLock(mLock);
        firstAudioTimeUs = mFirstAnchorTimeMediaUs;
    }

    int64_t currentPositionUs =
        firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->setInt32("reason", reason);
    notify->post();
}

// Arms a delayed tear-down while paused in offload mode so the DSP can power
// down; the generation counter lets cancelAudioOffloadPauseTimeout() void it.
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
        msg->post(kOffloadPauseMaxUs);
    }
}

// Invalidates any pending pause-timeout message by bumping its generation.
void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        ++mAudioOffloadPauseTimeoutGeneration;
    }
}

}  // namespace android