// NuPlayerRenderer.cpp — revision 09e0c3646362d29c78bc26c8b23b7a753c412e6c
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <media/stagefright/foundation/ABuffer.h> 24#include <media/stagefright/foundation/ADebug.h> 25#include <media/stagefright/foundation/AMessage.h> 26#include <media/stagefright/MediaErrors.h> 27#include <media/stagefright/MetaData.h> 28 29#include <inttypes.h> 30 31namespace android { 32 33// static 34const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll; 35 36NuPlayer::Renderer::Renderer( 37 const sp<MediaPlayerBase::AudioSink> &sink, 38 const sp<AMessage> ¬ify, 39 uint32_t flags) 40 : mAudioSink(sink), 41 mNotify(notify), 42 mFlags(flags), 43 mNumFramesWritten(0), 44 mDrainAudioQueuePending(false), 45 mDrainVideoQueuePending(false), 46 mAudioQueueGeneration(0), 47 mVideoQueueGeneration(0), 48 mFirstAnchorTimeMediaUs(-1), 49 mAnchorTimeMediaUs(-1), 50 mAnchorTimeRealUs(-1), 51 mFlushingAudio(false), 52 mFlushingVideo(false), 53 mHasAudio(false), 54 mHasVideo(false), 55 mSyncQueues(false), 56 mPaused(false), 57 mVideoSampleReceived(false), 58 mVideoRenderingStarted(false), 59 mVideoRenderingStartGeneration(0), 60 mAudioRenderingStartGeneration(0), 61 mLastPositionUpdateUs(-1ll), 62 mVideoLateByUs(0ll) { 63} 64 65NuPlayer::Renderer::~Renderer() { 66 if (offloadingAudio()) { 67 
mAudioSink->stop(); 68 mAudioSink->flush(); 69 mAudioSink->close(); 70 } 71} 72 73void NuPlayer::Renderer::queueBuffer( 74 bool audio, 75 const sp<ABuffer> &buffer, 76 const sp<AMessage> ¬ifyConsumed) { 77 sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); 78 msg->setInt32("audio", static_cast<int32_t>(audio)); 79 msg->setBuffer("buffer", buffer); 80 msg->setMessage("notifyConsumed", notifyConsumed); 81 msg->post(); 82} 83 84void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { 85 CHECK_NE(finalResult, (status_t)OK); 86 87 sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); 88 msg->setInt32("audio", static_cast<int32_t>(audio)); 89 msg->setInt32("finalResult", finalResult); 90 msg->post(); 91} 92 93void NuPlayer::Renderer::flush(bool audio) { 94 { 95 Mutex::Autolock autoLock(mFlushLock); 96 if (audio) { 97 if (mFlushingAudio) { 98 return; 99 } 100 mFlushingAudio = true; 101 } else { 102 if (mFlushingVideo) { 103 return; 104 } 105 mFlushingVideo = true; 106 } 107 } 108 109 sp<AMessage> msg = new AMessage(kWhatFlush, id()); 110 msg->setInt32("audio", static_cast<int32_t>(audio)); 111 msg->post(); 112} 113 114void NuPlayer::Renderer::signalTimeDiscontinuity() { 115 Mutex::Autolock autoLock(mLock); 116 // CHECK(mAudioQueue.empty()); 117 // CHECK(mVideoQueue.empty()); 118 mFirstAnchorTimeMediaUs = -1; 119 mAnchorTimeMediaUs = -1; 120 mAnchorTimeRealUs = -1; 121 mSyncQueues = false; 122} 123 124void NuPlayer::Renderer::signalAudioSinkChanged() { 125 (new AMessage(kWhatAudioSinkChanged, id()))->post(); 126} 127 128void NuPlayer::Renderer::signalDisableOffloadAudio() { 129 (new AMessage(kWhatDisableOffloadAudio, id()))->post(); 130} 131 132void NuPlayer::Renderer::pause() { 133 (new AMessage(kWhatPause, id()))->post(); 134} 135 136void NuPlayer::Renderer::resume() { 137 (new AMessage(kWhatResume, id()))->post(); 138} 139 140void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { 141 switch (msg->what()) { 142 case kWhatStopAudioSink: 
143 { 144 mAudioSink->stop(); 145 break; 146 } 147 148 case kWhatDrainAudioQueue: 149 { 150 int32_t generation; 151 CHECK(msg->findInt32("generation", &generation)); 152 if (generation != mAudioQueueGeneration) { 153 break; 154 } 155 156 mDrainAudioQueuePending = false; 157 158 if (onDrainAudioQueue()) { 159 uint32_t numFramesPlayed; 160 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), 161 (status_t)OK); 162 163 uint32_t numFramesPendingPlayout = 164 mNumFramesWritten - numFramesPlayed; 165 166 // This is how long the audio sink will have data to 167 // play back. 168 int64_t delayUs = 169 mAudioSink->msecsPerFrame() 170 * numFramesPendingPlayout * 1000ll; 171 172 // Let's give it more data after about half that time 173 // has elapsed. 174 // kWhatDrainAudioQueue is used for non-offloading mode, 175 // and mLock is used only for offloading mode. Therefore, 176 // no need to acquire mLock here. 177 postDrainAudioQueue_l(delayUs / 2); 178 } 179 break; 180 } 181 182 case kWhatDrainVideoQueue: 183 { 184 int32_t generation; 185 CHECK(msg->findInt32("generation", &generation)); 186 if (generation != mVideoQueueGeneration) { 187 break; 188 } 189 190 mDrainVideoQueuePending = false; 191 192 onDrainVideoQueue(); 193 194 postDrainVideoQueue(); 195 break; 196 } 197 198 case kWhatQueueBuffer: 199 { 200 onQueueBuffer(msg); 201 break; 202 } 203 204 case kWhatQueueEOS: 205 { 206 onQueueEOS(msg); 207 break; 208 } 209 210 case kWhatFlush: 211 { 212 onFlush(msg); 213 break; 214 } 215 216 case kWhatAudioSinkChanged: 217 { 218 onAudioSinkChanged(); 219 break; 220 } 221 222 case kWhatDisableOffloadAudio: 223 { 224 onDisableOffloadAudio(); 225 break; 226 } 227 228 case kWhatPause: 229 { 230 onPause(); 231 break; 232 } 233 234 case kWhatResume: 235 { 236 onResume(); 237 break; 238 } 239 240 case kWhatAudioOffloadTearDown: 241 { 242 onAudioOffloadTearDown(); 243 break; 244 } 245 246 default: 247 TRESPASS(); 248 break; 249 } 250} 251 252void 
NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) { 253 if (mDrainAudioQueuePending || mSyncQueues || mPaused 254 || offloadingAudio()) { 255 return; 256 } 257 258 if (mAudioQueue.empty()) { 259 return; 260 } 261 262 mDrainAudioQueuePending = true; 263 sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); 264 msg->setInt32("generation", mAudioQueueGeneration); 265 msg->post(delayUs); 266} 267 268void NuPlayer::Renderer::prepareForMediaRenderingStart() { 269 mAudioRenderingStartGeneration = mAudioQueueGeneration; 270 mVideoRenderingStartGeneration = mVideoQueueGeneration; 271} 272 273void NuPlayer::Renderer::notifyIfMediaRenderingStarted() { 274 if (mVideoRenderingStartGeneration == mVideoQueueGeneration && 275 mAudioRenderingStartGeneration == mAudioQueueGeneration) { 276 mVideoRenderingStartGeneration = -1; 277 mAudioRenderingStartGeneration = -1; 278 279 sp<AMessage> notify = mNotify->dup(); 280 notify->setInt32("what", kWhatMediaRenderingStart); 281 notify->post(); 282 } 283} 284 285// static 286size_t NuPlayer::Renderer::AudioSinkCallback( 287 MediaPlayerBase::AudioSink * /* audioSink */, 288 void *buffer, 289 size_t size, 290 void *cookie, 291 MediaPlayerBase::AudioSink::cb_event_t event) { 292 NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie; 293 294 switch (event) { 295 case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER: 296 { 297 return me->fillAudioBuffer(buffer, size); 298 break; 299 } 300 301 case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END: 302 { 303 me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM); 304 break; 305 } 306 307 case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN: 308 { 309 me->notifyAudioOffloadTearDown(); 310 break; 311 } 312 } 313 314 return 0; 315} 316 317size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) { 318 Mutex::Autolock autoLock(mLock); 319 320 if (!offloadingAudio() || mPaused) { 321 return 0; 322 } 323 324 bool hasEOS = false; 325 326 size_t sizeCopied = 0; 327 bool firstEntry = 
true; 328 while (sizeCopied < size && !mAudioQueue.empty()) { 329 QueueEntry *entry = &*mAudioQueue.begin(); 330 331 if (entry->mBuffer == NULL) { // EOS 332 hasEOS = true; 333 mAudioQueue.erase(mAudioQueue.begin()); 334 entry = NULL; 335 break; 336 } 337 338 if (firstEntry && entry->mOffset == 0) { 339 firstEntry = false; 340 int64_t mediaTimeUs; 341 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 342 ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 343 if (mFirstAnchorTimeMediaUs == -1) { 344 mFirstAnchorTimeMediaUs = mediaTimeUs; 345 } 346 347 int64_t nowUs = ALooper::GetNowUs(); 348 mAnchorTimeMediaUs = 349 mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs); 350 mAnchorTimeRealUs = nowUs; 351 352 notifyPosition(); 353 } 354 355 size_t copy = entry->mBuffer->size() - entry->mOffset; 356 size_t sizeRemaining = size - sizeCopied; 357 if (copy > sizeRemaining) { 358 copy = sizeRemaining; 359 } 360 361 memcpy((char *)buffer + sizeCopied, 362 entry->mBuffer->data() + entry->mOffset, 363 copy); 364 365 entry->mOffset += copy; 366 if (entry->mOffset == entry->mBuffer->size()) { 367 entry->mNotifyConsumed->post(); 368 mAudioQueue.erase(mAudioQueue.begin()); 369 entry = NULL; 370 } 371 sizeCopied += copy; 372 notifyIfMediaRenderingStarted(); 373 } 374 375 if (hasEOS) { 376 (new AMessage(kWhatStopAudioSink, id()))->post(); 377 } 378 379 return sizeCopied; 380} 381 382bool NuPlayer::Renderer::onDrainAudioQueue() { 383 uint32_t numFramesPlayed; 384 if (mAudioSink->getPosition(&numFramesPlayed) != OK) { 385 return false; 386 } 387 388 ssize_t numFramesAvailableToWrite = 389 mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); 390 391#if 0 392 if (numFramesAvailableToWrite == mAudioSink->frameCount()) { 393 ALOGI("audio sink underrun"); 394 } else { 395 ALOGV("audio queue has %d frames left to play", 396 mAudioSink->frameCount() - numFramesAvailableToWrite); 397 } 398#endif 399 400 size_t numBytesAvailableToWrite = 
401 numFramesAvailableToWrite * mAudioSink->frameSize(); 402 403 while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) { 404 QueueEntry *entry = &*mAudioQueue.begin(); 405 406 if (entry->mBuffer == NULL) { 407 // EOS 408 int64_t postEOSDelayUs = 0; 409 if (mAudioSink->needsTrailingPadding()) { 410 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs()); 411 } 412 notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs); 413 414 mAudioQueue.erase(mAudioQueue.begin()); 415 entry = NULL; 416 return false; 417 } 418 419 if (entry->mOffset == 0) { 420 int64_t mediaTimeUs; 421 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 422 ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 423 if (mFirstAnchorTimeMediaUs == -1) { 424 mFirstAnchorTimeMediaUs = mediaTimeUs; 425 } 426 mAnchorTimeMediaUs = mediaTimeUs; 427 428 int64_t nowUs = ALooper::GetNowUs(); 429 mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs); 430 431 notifyPosition(); 432 } 433 434 size_t copy = entry->mBuffer->size() - entry->mOffset; 435 if (copy > numBytesAvailableToWrite) { 436 copy = numBytesAvailableToWrite; 437 } 438 439 ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy); 440 if (written < 0) { 441 // An error in AudioSink write is fatal here. 
442 LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy); 443 } 444 445 entry->mOffset += written; 446 if (entry->mOffset == entry->mBuffer->size()) { 447 entry->mNotifyConsumed->post(); 448 mAudioQueue.erase(mAudioQueue.begin()); 449 450 entry = NULL; 451 } 452 453 numBytesAvailableToWrite -= written; 454 size_t copiedFrames = written / mAudioSink->frameSize(); 455 mNumFramesWritten += copiedFrames; 456 457 notifyIfMediaRenderingStarted(); 458 459 if (written != (ssize_t)copy) { 460 // A short count was received from AudioSink::write() 461 // 462 // AudioSink write should block until exactly the number of bytes are delivered. 463 // But it may return with a short count (without an error) when: 464 // 465 // 1) Size to be copied is not a multiple of the frame size. We consider this fatal. 466 // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded. 467 468 // (Case 1) 469 // Must be a multiple of the frame size. If it is not a multiple of a frame size, it 470 // needs to fail, as we should not carry over fractional frames between calls. 471 CHECK_EQ(copy % mAudioSink->frameSize(), 0); 472 473 // (Case 2) 474 // Return early to the caller. 475 // Beware of calling immediately again as this may busy-loop if you are not careful. 
476 ALOGW("AudioSink write short frame count %zd < %zu", written, copy); 477 break; 478 } 479 } 480 return !mAudioQueue.empty(); 481} 482 483int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) { 484 int64_t writtenAudioDurationUs = 485 mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame(); 486 return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs); 487} 488 489void NuPlayer::Renderer::postDrainVideoQueue() { 490 if (mDrainVideoQueuePending 491 || mSyncQueues 492 || (mPaused && mVideoSampleReceived)) { 493 return; 494 } 495 496 if (mVideoQueue.empty()) { 497 return; 498 } 499 500 QueueEntry &entry = *mVideoQueue.begin(); 501 502 sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); 503 msg->setInt32("generation", mVideoQueueGeneration); 504 505 int64_t delayUs; 506 507 if (entry.mBuffer == NULL) { 508 // EOS doesn't carry a timestamp. 509 delayUs = 0; 510 } else if (mFlags & FLAG_REAL_TIME) { 511 int64_t mediaTimeUs; 512 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 513 514 delayUs = mediaTimeUs - ALooper::GetNowUs(); 515 } else { 516 int64_t mediaTimeUs; 517 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 518 519 if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) { 520 mFirstAnchorTimeMediaUs = mediaTimeUs; 521 } 522 if (mAnchorTimeMediaUs < 0) { 523 delayUs = 0; 524 525 if (!mHasAudio) { 526 mAnchorTimeMediaUs = mediaTimeUs; 527 mAnchorTimeRealUs = ALooper::GetNowUs(); 528 notifyPosition(); 529 } 530 } else { 531 int64_t realTimeUs = 532 (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs; 533 534 delayUs = realTimeUs - ALooper::GetNowUs(); 535 } 536 } 537 538 ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs); 539 msg->post(delayUs); 540 541 mDrainVideoQueuePending = true; 542} 543 544void NuPlayer::Renderer::onDrainVideoQueue() { 545 if (mVideoQueue.empty()) { 546 return; 547 } 548 549 QueueEntry *entry = &*mVideoQueue.begin(); 550 551 if (entry->mBuffer == 
NULL) { 552 // EOS 553 554 notifyEOS(false /* audio */, entry->mFinalResult); 555 556 mVideoQueue.erase(mVideoQueue.begin()); 557 entry = NULL; 558 559 mVideoLateByUs = 0ll; 560 return; 561 } 562 563 int64_t realTimeUs; 564 if (mFlags & FLAG_REAL_TIME) { 565 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); 566 } else { 567 int64_t mediaTimeUs; 568 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 569 570 realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs; 571 } 572 573 bool tooLate = false; 574 575 if (!mPaused) { 576 mVideoLateByUs = ALooper::GetNowUs() - realTimeUs; 577 tooLate = (mVideoLateByUs > 40000); 578 579 if (tooLate) { 580 ALOGV("video late by %lld us (%.2f secs)", 581 mVideoLateByUs, mVideoLateByUs / 1E6); 582 } else { 583 ALOGV("rendering video at media time %.2f secs", 584 (mFlags & FLAG_REAL_TIME ? realTimeUs : 585 (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); 586 } 587 } else { 588 mVideoLateByUs = 0ll; 589 } 590 591 entry->mNotifyConsumed->setInt32("render", !tooLate); 592 entry->mNotifyConsumed->post(); 593 mVideoQueue.erase(mVideoQueue.begin()); 594 entry = NULL; 595 596 mVideoSampleReceived = true; 597 598 if (!mPaused) { 599 if (!mVideoRenderingStarted) { 600 mVideoRenderingStarted = true; 601 notifyVideoRenderingStart(); 602 } 603 notifyIfMediaRenderingStarted(); 604 } 605} 606 607void NuPlayer::Renderer::notifyVideoRenderingStart() { 608 sp<AMessage> notify = mNotify->dup(); 609 notify->setInt32("what", kWhatVideoRenderingStart); 610 notify->post(); 611} 612 613void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) { 614 sp<AMessage> notify = mNotify->dup(); 615 notify->setInt32("what", kWhatEOS); 616 notify->setInt32("audio", static_cast<int32_t>(audio)); 617 notify->setInt32("finalResult", finalResult); 618 notify->post(delayUs); 619} 620 621void NuPlayer::Renderer::notifyAudioOffloadTearDown() { 622 (new AMessage(kWhatAudioOffloadTearDown, 
id()))->post(); 623} 624 625void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { 626 int32_t audio; 627 CHECK(msg->findInt32("audio", &audio)); 628 629 if (audio) { 630 mHasAudio = true; 631 } else { 632 mHasVideo = true; 633 } 634 635 if (dropBufferWhileFlushing(audio, msg)) { 636 return; 637 } 638 639 sp<ABuffer> buffer; 640 CHECK(msg->findBuffer("buffer", &buffer)); 641 642 sp<AMessage> notifyConsumed; 643 CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed)); 644 645 QueueEntry entry; 646 entry.mBuffer = buffer; 647 entry.mNotifyConsumed = notifyConsumed; 648 entry.mOffset = 0; 649 entry.mFinalResult = OK; 650 651 if (audio) { 652 Mutex::Autolock autoLock(mLock); 653 mAudioQueue.push_back(entry); 654 postDrainAudioQueue_l(); 655 } else { 656 mVideoQueue.push_back(entry); 657 postDrainVideoQueue(); 658 } 659 660 Mutex::Autolock autoLock(mLock); 661 if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) { 662 return; 663 } 664 665 sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer; 666 sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer; 667 668 if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) { 669 // EOS signalled on either queue. 670 syncQueuesDone_l(); 671 return; 672 } 673 674 int64_t firstAudioTimeUs; 675 int64_t firstVideoTimeUs; 676 CHECK(firstAudioBuffer->meta() 677 ->findInt64("timeUs", &firstAudioTimeUs)); 678 CHECK(firstVideoBuffer->meta() 679 ->findInt64("timeUs", &firstVideoTimeUs)); 680 681 int64_t diff = firstVideoTimeUs - firstAudioTimeUs; 682 683 ALOGV("queueDiff = %.2f secs", diff / 1E6); 684 685 if (diff > 100000ll) { 686 // Audio data starts More than 0.1 secs before video. 687 // Drop some audio. 
688 689 (*mAudioQueue.begin()).mNotifyConsumed->post(); 690 mAudioQueue.erase(mAudioQueue.begin()); 691 return; 692 } 693 694 syncQueuesDone_l(); 695} 696 697void NuPlayer::Renderer::syncQueuesDone_l() { 698 if (!mSyncQueues) { 699 return; 700 } 701 702 mSyncQueues = false; 703 704 if (!mAudioQueue.empty()) { 705 postDrainAudioQueue_l(); 706 } 707 708 if (!mVideoQueue.empty()) { 709 postDrainVideoQueue(); 710 } 711} 712 713void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { 714 int32_t audio; 715 CHECK(msg->findInt32("audio", &audio)); 716 717 if (dropBufferWhileFlushing(audio, msg)) { 718 return; 719 } 720 721 int32_t finalResult; 722 CHECK(msg->findInt32("finalResult", &finalResult)); 723 724 QueueEntry entry; 725 entry.mOffset = 0; 726 entry.mFinalResult = finalResult; 727 728 if (audio) { 729 Mutex::Autolock autoLock(mLock); 730 if (mAudioQueue.empty() && mSyncQueues) { 731 syncQueuesDone_l(); 732 } 733 mAudioQueue.push_back(entry); 734 postDrainAudioQueue_l(); 735 } else { 736 if (mVideoQueue.empty() && mSyncQueues) { 737 Mutex::Autolock autoLock(mLock); 738 syncQueuesDone_l(); 739 } 740 mVideoQueue.push_back(entry); 741 postDrainVideoQueue(); 742 } 743} 744 745void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { 746 int32_t audio; 747 CHECK(msg->findInt32("audio", &audio)); 748 749 { 750 Mutex::Autolock autoLock(mFlushLock); 751 if (audio) { 752 mFlushingAudio = false; 753 } else { 754 mFlushingVideo = false; 755 } 756 } 757 758 // If we're currently syncing the queues, i.e. dropping audio while 759 // aligning the first audio/video buffer times and only one of the 760 // two queues has data, we may starve that queue by not requesting 761 // more buffers from the decoder. If the other source then encounters 762 // a discontinuity that leads to flushing, we'll never find the 763 // corresponding discontinuity on the other queue. 764 // Therefore we'll stop syncing the queues if at least one of them 765 // is flushed. 
766 { 767 Mutex::Autolock autoLock(mLock); 768 syncQueuesDone_l(); 769 } 770 771 ALOGV("flushing %s", audio ? "audio" : "video"); 772 if (audio) { 773 { 774 Mutex::Autolock autoLock(mLock); 775 flushQueue(&mAudioQueue); 776 777 ++mAudioQueueGeneration; 778 prepareForMediaRenderingStart(); 779 780 if (offloadingAudio()) { 781 mFirstAnchorTimeMediaUs = -1; 782 } 783 } 784 785 mDrainAudioQueuePending = false; 786 787 if (offloadingAudio()) { 788 mAudioSink->pause(); 789 mAudioSink->flush(); 790 mAudioSink->start(); 791 } 792 } else { 793 flushQueue(&mVideoQueue); 794 795 mDrainVideoQueuePending = false; 796 ++mVideoQueueGeneration; 797 798 prepareForMediaRenderingStart(); 799 } 800 801 mVideoSampleReceived = false; 802 notifyFlushComplete(audio); 803} 804 805void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) { 806 while (!queue->empty()) { 807 QueueEntry *entry = &*queue->begin(); 808 809 if (entry->mBuffer != NULL) { 810 entry->mNotifyConsumed->post(); 811 } 812 813 queue->erase(queue->begin()); 814 entry = NULL; 815 } 816} 817 818void NuPlayer::Renderer::notifyFlushComplete(bool audio) { 819 sp<AMessage> notify = mNotify->dup(); 820 notify->setInt32("what", kWhatFlushComplete); 821 notify->setInt32("audio", static_cast<int32_t>(audio)); 822 notify->post(); 823} 824 825bool NuPlayer::Renderer::dropBufferWhileFlushing( 826 bool audio, const sp<AMessage> &msg) { 827 bool flushing = false; 828 829 { 830 Mutex::Autolock autoLock(mFlushLock); 831 if (audio) { 832 flushing = mFlushingAudio; 833 } else { 834 flushing = mFlushingVideo; 835 } 836 } 837 838 if (!flushing) { 839 return false; 840 } 841 842 sp<AMessage> notifyConsumed; 843 if (msg->findMessage("notifyConsumed", ¬ifyConsumed)) { 844 notifyConsumed->post(); 845 } 846 847 return true; 848} 849 850void NuPlayer::Renderer::onAudioSinkChanged() { 851 if (offloadingAudio()) { 852 return; 853 } 854 CHECK(!mDrainAudioQueuePending); 855 mNumFramesWritten = 0; 856 uint32_t written; 857 if 
(mAudioSink->getFramesWritten(&written) == OK) { 858 mNumFramesWritten = written; 859 } 860} 861 862void NuPlayer::Renderer::onDisableOffloadAudio() { 863 Mutex::Autolock autoLock(mLock); 864 mFlags &= ~FLAG_OFFLOAD_AUDIO; 865 ++mAudioQueueGeneration; 866} 867 868void NuPlayer::Renderer::notifyPosition() { 869 // notifyPosition() must be called only after setting mAnchorTimeRealUs 870 // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position. 871 //CHECK_GE(mAnchorTimeRealUs, 0); 872 //CHECK_GE(mAnchorTimeMediaUs, 0); 873 //CHECK(!mPaused || !mHasAudio); // video-only does display in paused mode. 874 875 int64_t nowUs = ALooper::GetNowUs(); 876 877 if (mLastPositionUpdateUs >= 0 878 && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) { 879 return; 880 } 881 mLastPositionUpdateUs = nowUs; 882 883 int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; 884 885 //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)" 886 // " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)", 887 // (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs, 888 // (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs); 889 890 // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(), 891 // positionUs may be less than the first media time. This is avoided 892 // here to prevent potential retrograde motion of the position bar 893 // when starting up after a seek. 
894 if (positionUs < mFirstAnchorTimeMediaUs) { 895 positionUs = mFirstAnchorTimeMediaUs; 896 } 897 sp<AMessage> notify = mNotify->dup(); 898 notify->setInt32("what", kWhatPosition); 899 notify->setInt64("positionUs", positionUs); 900 notify->setInt64("videoLateByUs", mVideoLateByUs); 901 notify->post(); 902} 903 904void NuPlayer::Renderer::onPause() { 905 if (mPaused) { 906 ALOGW("Renderer::onPause() called while already paused!"); 907 return; 908 } 909 { 910 Mutex::Autolock autoLock(mLock); 911 ++mAudioQueueGeneration; 912 ++mVideoQueueGeneration; 913 prepareForMediaRenderingStart(); 914 mPaused = true; 915 } 916 917 mDrainAudioQueuePending = false; 918 mDrainVideoQueuePending = false; 919 920 if (mHasAudio) { 921 mAudioSink->pause(); 922 } 923 924 ALOGV("now paused audio queue has %d entries, video has %d entries", 925 mAudioQueue.size(), mVideoQueue.size()); 926} 927 928void NuPlayer::Renderer::onResume() { 929 if (!mPaused) { 930 return; 931 } 932 933 if (mHasAudio) { 934 mAudioSink->start(); 935 } 936 937 Mutex::Autolock autoLock(mLock); 938 mPaused = false; 939 940 if (!mAudioQueue.empty()) { 941 postDrainAudioQueue_l(); 942 } 943 944 if (!mVideoQueue.empty()) { 945 postDrainVideoQueue(); 946 } 947} 948 949// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs() 950// as it acquires locks and may query the audio driver. 951// 952// Some calls are not needed since notifyPosition() doesn't always deliver a message. 953// Some calls could conceivably retrieve extrapolated data instead of 954// accessing getTimestamp() or getPosition() every time a data buffer with 955// a media time is received. 956// 957int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { 958 uint32_t numFramesPlayed; 959 int64_t numFramesPlayedAt; 960 AudioTimestamp ts; 961 static const int64_t kStaleTimestamp100ms = 100000; 962 963 status_t res = mAudioSink->getTimestamp(ts); 964 if (res == OK) { // case 1: mixing audio tracks and offloaded tracks. 
965 numFramesPlayed = ts.mPosition; 966 numFramesPlayedAt = 967 ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; 968 const int64_t timestampAge = nowUs - numFramesPlayedAt; 969 if (timestampAge > kStaleTimestamp100ms) { 970 // This is an audio FIXME. 971 // getTimestamp returns a timestamp which may come from audio mixing threads. 972 // After pausing, the MixerThread may go idle, thus the mTime estimate may 973 // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms, 974 // the max latency should be about 25ms with an average around 12ms (to be verified). 975 // For safety we use 100ms. 976 ALOGW("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)", 977 (long long)nowUs, (long long)numFramesPlayedAt); 978 numFramesPlayedAt = nowUs - kStaleTimestamp100ms; 979 } 980 //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt); 981 } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track 982 numFramesPlayed = 0; 983 numFramesPlayedAt = nowUs; 984 //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", 985 // numFramesPlayed, (long long)numFramesPlayedAt); 986 } else { // case 3: transitory at new track or audio fast tracks. 987 res = mAudioSink->getPosition(&numFramesPlayed); 988 CHECK_EQ(res, (status_t)OK); 989 numFramesPlayedAt = nowUs; 990 numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */ 991 //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); 992 } 993 994 // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. 
995 //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test 996 int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame() 997 + nowUs - numFramesPlayedAt; 998 if (durationUs < 0) { 999 // Occurs when numFramesPlayed position is very small and the following: 1000 // (1) In case 1, the time nowUs is computed before getTimestamp() is called and 1001 // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed. 1002 // (2) In case 3, using getPosition and adding mAudioSink->latency() to 1003 // numFramesPlayedAt, by a time amount greater than numFramesPlayed. 1004 // 1005 // Both of these are transitory conditions. 1006 ALOGW("getPlayedOutAudioDurationUs: negative timestamp %lld set to zero", (long long)durationUs); 1007 durationUs = 0; 1008 } 1009 ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)", 1010 (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt); 1011 return durationUs; 1012} 1013 1014void NuPlayer::Renderer::onAudioOffloadTearDown() { 1015 int64_t firstAudioTimeUs; 1016 { 1017 Mutex::Autolock autoLock(mLock); 1018 firstAudioTimeUs = mFirstAnchorTimeMediaUs; 1019 } 1020 1021 int64_t currentPositionUs = 1022 firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs()); 1023 1024 mAudioSink->stop(); 1025 mAudioSink->flush(); 1026 1027 sp<AMessage> notify = mNotify->dup(); 1028 notify->setInt32("what", kWhatAudioOffloadTearDown); 1029 notify->setInt64("positionUs", currentPositionUs); 1030 notify->post(); 1031} 1032 1033} // namespace android 1034 1035