NuPlayerRenderer.cpp revision a31335a4ec96ba351f25f3b26fa79a78c2723a13
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>

#include <inttypes.h>

namespace android {

// Minimum interval between position updates posted to the observer; see
// notifyPosition(), which rate-limits against mLastPositionUpdateUs.
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// All timing state (anchors, generations, pending-drain flags) starts in the
// "no anchor yet" state (-1) until the first buffer establishes it.
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAudioTimeUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
    // In offload mode the sink callback pulls data from us; make sure the
    // sink is fully quiesced before this object goes away.
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

// Asynchronously enqueue a decoded buffer (audio or video). notifyConsumed is
// posted back once the buffer has been rendered or dropped.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Asynchronously enqueue an end-of-stream marker for one stream.
// finalResult must be an actual error/EOS code, never OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Request a flush of one queue. The flag is raised synchronously (so
// dropBufferWhileFlushing() starts discarding immediately); the actual
// queue teardown happens on the looper thread in onFlush().
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Drop the media/real-time anchor so the next queued buffer re-establishes
// the A/V clock. Called on seek/discontinuity; runs on the caller's thread,
// hence the lock. NOTE(review): runs synchronously unlike most entry points,
// which trampoline through messages — presumably callers guarantee the
// queues are already empty (see the commented-out CHECKs).
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Looper-thread dispatch. All state mutation (other than the offload
// callback path) funnels through here.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            // Stale messages from before a flush carry an old generation
            // number and are ignored.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedule a kWhatDrainAudioQueue message after delayUs, unless one is
// already pending, we're paused, syncing queues, or in offload mode (where
// the sink callback pulls data instead). The _l suffix: callers in the
// offload path must hold mLock (see comment in onMessageReceived).
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

// Arm the "media rendering started" notification: it fires once both
// rendering-start generations have caught up to the queue generations
// (see notifyIfMediaRenderingStarted).
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Post kWhatMediaRenderingStart exactly once per prepare: the generations
// are set to -1 after firing so subsequent calls are no-ops until the next
// prepareForMediaRenderingStart().
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// AudioSink callback used in offload mode; runs on the sink's thread,
// NOT the renderer looper. cookie is the Renderer instance.
// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

// Offload-mode pull path: copy up to `size` bytes from the audio queue into
// the sink's buffer. Runs on the sink callback thread under mLock.
// Returns the number of bytes copied; 0 once we leave offload mode.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio()) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Re-anchor the clock off the first whole buffer we consume in
        // this fill; mOffset == 0 means we haven't partially drained it.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAudioTimeUs == -1) {
                mFirstAudioTimeUs = mediaTimeUs;
            }

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            // TODO: figure out how to calculate initial latency.
            // Otherwise, the initial time is not correct till the first sample
            // is played.
            mAnchorTimeMediaUs = mFirstAudioTimeUs
                    + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
            mAnchorTimeRealUs = ALooper::GetNowUs();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Buffer fully consumed: tell the decoder and pop it.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (sizeCopied != 0) {
        notifyPosition();
    }

    if (hasEOS) {
        // Can't stop the sink from its own callback thread; bounce to the
        // looper instead.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Non-offload push path: write as much queued audio as the sink can accept.
// Returns true if the audio queue still has data (caller reschedules).
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        // NOTE(review): %d with a frame-count difference that is likely
        // size_t/ssize_t — would need a cast or %zd if re-enabled.
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            // Estimate when this buffer will actually be audible: now, plus
            // queued frames ahead of it, plus (roughly) half the sink latency.
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            //     1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            //     2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    notifyPosition();

    return !mAudioQueue.empty();
}

// Schedule a kWhatDrainVideoQueue for when the head video buffer is due,
// derived from its media timestamp and the current A/V anchor.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            // Video-only playback anchors the clock itself; with audio
            // present, the audio path owns the anchor.
            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

// Render (or drop, if >40ms late) the head video buffer.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        // NOTE(review): %lld with int64_t — should use %" PRId64 " for
        // portability (the file already includes <inttypes.h>).
        ALOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs",
                (mFlags & FLAG_REAL_TIME ? realTimeUs :
                (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
    }

    // "render" tells the consumer whether to display or just recycle
    // the buffer.
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

// Looper-thread handler for queueBuffer(): append the buffer to the right
// queue, kick the drain, and — while mSyncQueues is set — drop leading audio
// until the first audio and video timestamps are within 0.1s.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        // NOTE(review): mVideoQueue is modified without mLock here, while
        // the sync logic below takes mLock — presumably safe because video
        // is only touched on the looper thread; confirm before changing.
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

// Leave the queue-sync phase and resume draining both queues.
// Caller must hold mLock.
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-thread handler for queueEOS(): push a sentinel entry (mBuffer ==
// NULL) carrying the final result.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        // NOTE(review): lock scope is inconsistent with the audio branch —
        // here mLock covers only syncQueuesDone_l(), and mVideoQueue is
        // read/modified outside it. Presumably tolerated because video is
        // looper-thread-only; verify before relying on it.
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper-thread handler for flush(): discard the queue, bump the generation
// (invalidating pending drain messages), clear the flushing flag and notify.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);
        }

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;

        prepareForMediaRenderingStart();
        if (offloadingAudio()) {
            // Offloaded sink keeps running across a flush; pause/flush/start
            // resets its pipeline, and mFirstAudioTimeUs must be re-learned.
            mFirstAudioTimeUs = -1;
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    notifyFlushComplete(audio);
}

// Pop every entry, returning real buffers to their producers via
// notifyConsumed (EOS sentinels have nothing to return).
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// While a flush for this stream is in progress, consume and discard the
// incoming buffer (returning it to the producer). Returns true if dropped.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// Re-sync our written-frame counter with a (re)opened sink. Not applicable
// in offload mode, where the sink pulls and we don't count frames.
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

// Switch from offload to non-offload audio: clear the flag under mLock (the
// offload callback checks it) and invalidate any in-flight audio messages.
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

// Post the current playback position upstream, rate-limited to one update
// per kMinPositionUpdateDelayUs. No-op until an anchor is established.
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

// Stop draining both queues and pause the sink. Pending drain messages are
// invalidated by bumping the generations.
void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
    }

    // NOTE(review): %d with List::size() (size_t) — would need %zu or a
    // cast; only visible with verbose logging enabled.
    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

// Restart the sink and reschedule draining of whatever is queued.
void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    Mutex::Autolock autoLock(mLock);
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Offload teardown (e.g. sink error / routing change): capture the current
// position so playback can be resumed in non-offload mode, stop the sink,
// and report upstream.
void NuPlayer::Renderer::onAudioOffloadTearDown() {
    uint32_t numFramesPlayed;
    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

    int64_t currentPositionUs = mFirstAudioTimeUs
            + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->post();
}

}  // namespace android