// NuPlayerRenderer.cpp revision f5b1db11734358d979a23a1ac4903872186ef60b
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>

#include <inttypes.h>

namespace android {

// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
static const int64_t kOffloadPauseMaxUs = 60000000ll;

// static
// Minimum interval between successive position notifications (see notifyPosition()).
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false) {
}

NuPlayer::Renderer::~Renderer() {
    // In offload mode the renderer owns the sink's lifecycle; shut it down fully
    // so the DSP can release the offloaded stream.
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

// Hands a decoded buffer (audio or video) to the renderer's looper thread.
// notifyConsumed is posted back once the buffer has been rendered or dropped.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Queues an end-of-stream marker on the audio or video queue.
// finalResult carries the terminating status and must not be OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Requests an asynchronous flush of one queue. A duplicate request while a
// flush for the same stream is already pending is silently ignored
// (mFlushingAudio/mFlushingVideo act as the pending flags, guarded by mFlushLock).
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets all anchor-time state and disables queue syncing, e.g. around a
// timeline discontinuity signalled by the player.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mFirstAnchorTimeMediaUs = -1;
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Looper callback: dispatches every renderer message to its on*() handler.
// Drain messages are tagged with a generation number so that messages posted
// before a flush/pause are ignored once the generation counter is bumped.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;  // stale message from before a flush/pause
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;  // stale message from before a flush/pause
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;  // timeout was cancelled by a resume
            }
            onAudioOffloadTearDown();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules a kWhatDrainAudioQueue message after delayUs, unless a drain is
// already pending, queues are being synced, we are paused, or audio is
// offloaded (the offload path is driven by the sink callback instead).
// The _l suffix indicates a locking convention — presumably mLock must be held
// on the offload-relevant paths; see the comment in onMessageReceived.
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

// Arms notifyIfMediaRenderingStarted(): the "rendering started" notification
// fires once both start generations match the current queue generations.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Posts kWhatMediaRenderingStart exactly once per prepareForMediaRenderingStart()
// arming (the generations are set to -1 afterwards so it cannot re-fire).
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
// AudioSink callback trampoline: recovers the Renderer from the cookie and
// forwards fill/stream-end/tear-down events. Returns bytes filled for
// CB_EVENT_FILL_BUFFER, 0 otherwise.
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

// Offload-mode fill path (runs via AudioSinkCallback, i.e. on the sink's
// callback thread — hence the mLock). Copies up to `size` bytes of queued
// audio into `buffer` and returns the number of bytes copied. Posts
// kWhatStopAudioSink back to the looper when the EOS marker is reached.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }

            // Anchor media time is derived from how much audio has actually
            // played out, measured from the first anchor.
            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeMediaUs =
                mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
            mAnchorTimeRealUs = nowUs;

            notifyPosition();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Non-offload drain: writes as much queued audio into the AudioSink as it can
// currently accept. Returns true if the audio queue still has entries (so the
// caller reposts a drain), false on EOS or when the queue is exhausted.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                // Delay the EOS notification until the written audio has
                // actually played out.
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAnchorTimeMediaUs == -1) {
                mFirstAnchorTimeMediaUs = mediaTimeUs;
            }
            mAnchorTimeMediaUs = mediaTimeUs;

            // The buffer's media time maps to the real time at which the
            // pending (written-but-unplayed) audio will have played out.
            int64_t nowUs = ALooper::GetNowUs();
            mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);

            notifyPosition();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            //   1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            //   2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    return !mAudioQueue.empty();
}

// Duration of audio that has been written to the sink but not yet played out
// (written duration minus played-out duration).
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

// Schedules a kWhatDrainVideoQueue message timed so the head-of-queue frame is
// drained at (approximately) its display time, derived from the anchor times
// (or the raw timestamp in FLAG_REAL_TIME mode).
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) {
            mFirstAnchorTimeMediaUs = mediaTimeUs;
        }
        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            // Video-only: this first frame establishes the anchor. With audio
            // present, the audio path owns the anchor instead.
            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
                notifyPosition();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

// Releases the head video frame to the decoder for rendering (or dropping, if
// it is more than 40ms late), and handles the video EOS marker.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    bool tooLate = false;

    if (!mPaused) {
        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %lld us (%.2f secs)",
                 mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
        }
    } else {
        mVideoLateByUs = 0ll;
    }

    // "render" tells the consumer whether to display or drop the frame.
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

// Notifies the player of EOS on one stream, optionally delayed (used to defer
// audio EOS until pending audio has played out).
void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

// Looper handler for queueBuffer(): appends the entry to the proper queue,
// kicks the drain machinery, and while mSyncQueues is set drops leading audio
// until the two queues' head timestamps are within 0.1s of each other.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

// Ends the queue-sync phase and restarts draining on whichever queues have
// data. Must be called with mLock held (the _l convention used throughout).
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper handler for queueEOS(): appends an EOS marker entry (mBuffer == NULL)
// carrying finalResult, terminating queue syncing first if the queue is empty.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper handler for flush(): clears the pending-flush flag, empties the
// corresponding queue (notifying consumers of every entry), bumps the queue
// generation so in-flight drain messages are ignored, and notifies completion.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                mFirstAnchorTimeMediaUs = -1;
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}

// Empties a queue, posting each entry's consumed-notification (EOS markers
// have no buffer and no notification to post).
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// While a flush is pending on the given stream, consumes (drops) the incoming
// buffer immediately and returns true; otherwise returns false.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// Re-seeds the written-frame counter from the (possibly new) sink so that
// pending-playout math stays consistent. No-op in offload mode.
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

// Drops the offload flag and bumps the audio generation so any offload-era
// drain messages are discarded.
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

void NuPlayer::Renderer::notifyPosition() {
    // notifyPosition() must be called only after setting mAnchorTimeRealUs
    // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position.
    //CHECK_GE(mAnchorTimeRealUs, 0);
    //CHECK_GE(mAnchorTimeMediaUs, 0);
    //CHECK(!mPaused || !mHasAudio); // video-only does display in paused mode.

    int64_t nowUs = ALooper::GetNowUs();

    // Rate-limit position updates to one per kMinPositionUpdateDelayUs.
    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)"
    //        " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)",
    //        (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs,
    //        (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs);

    // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(),
    // positionUs may be less than the first media time.  This is avoided
    // here to prevent potential retrograde motion of the position bar
    // when starting up after a seek.
    if (positionUs < mFirstAnchorTimeMediaUs) {
        positionUs = mFirstAnchorTimeMediaUs;
    }
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

// Looper handler for pause(): bumps generations (invalidating pending drains),
// pauses the sink, and in offload mode starts the DSP power-down timeout.
void NuPlayer::Renderer::onPause() {
    if (mPaused) {
        ALOGW("Renderer::onPause() called while already paused!");
        return;
    }
    {
        Mutex::Autolock autoLock(mLock);
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
        mPaused = true;
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
        startAudioOffloadPauseTimeout();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());
}

// Looper handler for resume(): cancels the offload pause timeout, restarts the
// sink, and reschedules drains for any queued data.
void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls are not needed since notifyPosition() doesn't always deliver a message.
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
// Estimates how much audio (in microseconds) has actually been played out as
// of nowUs, preferring AudioSink::getTimestamp() and falling back to
// getPosition() + half the sink latency. Never returns a negative duration.
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGW("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //        numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test.
    int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGW("getPlayedOutAudioDurationUs: negative timestamp %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}

// Tears down the offloaded audio path (at most once): stops/flushes the sink
// and notifies the player with the current playback position so it can
// recreate the decoder/sink in non-offload mode.
void NuPlayer::Renderer::onAudioOffloadTearDown() {
    if (mAudioOffloadTornDown) {
        return;
    }
    mAudioOffloadTornDown = true;

    int64_t firstAudioTimeUs;
    {
        Mutex::Autolock autoLock(mLock);
        firstAudioTimeUs = mFirstAnchorTimeMediaUs;
    }

    int64_t currentPositionUs =
        firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->post();
}

// Arms the paused-while-offloading timeout; if it fires (and is not cancelled
// by resume) the offload path is torn down so the DSP can power off.
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
        msg->post(kOffloadPauseMaxUs);
    }
}

// Cancels a pending pause timeout by bumping its generation counter.
void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        ++mAudioOffloadPauseTimeoutGeneration;
    }
}

}  // namespace android