NuPlayerRenderer.cpp revision bc2fb720bbd0acd122bacc67e844e982d068f6f9
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>

namespace android {

// Minimum interval between position notifications posted to mNotify
// (see notifyPosition()): 100 ms.
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// Constructs the renderer.
// - sink:   audio sink all audio data is written to.
// - notify: message dup()'ed and posted back to the owner for events
//           (EOS, flush-complete, position, rendering-start, ...).
// - flags:  renderer flags (e.g. FLAG_REAL_TIME, FLAG_OFFLOAD_AUDIO —
//           the latter queried via offloadingAudio()).
// All time anchors start at -1 meaning "no anchor established yet".
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mFirstAudioTimeUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

// In offload mode the renderer owns the sink's lifecycle, so tear it
// down here; in non-offload mode the sink is managed elsewhere.
NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}
// Asynchronously queues a decoded buffer for rendering. The actual
// queueing happens on the renderer's looper thread in onQueueBuffer();
// notifyConsumed is posted once the buffer has been consumed/dropped.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Asynchronously queues an end-of-stream marker on the audio or video
// queue. finalResult must be an actual error/EOS code, never OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Initiates a flush of one stream. The mFlushing* flag is raised
// immediately (under mFlushLock) so that buffers queued from now on are
// dropped (see dropBufferWhileFlushing()); the queue itself is drained
// later on the looper thread in onFlush(). Calling flush twice for the
// same stream without an intervening onFlush() is a programming error
// (CHECK).
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Invalidates the media/real time anchor pair and stops queue syncing;
// called across a timestamp discontinuity so the next buffer
// re-establishes the anchor.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

// Asynchronous pause; handled in onPause().
void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

// Asynchronous resume; handled in onResume().
void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Looper-thread dispatch for all renderer messages.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            // Stale drain requests (posted before a flush/pause bumped
            // the generation) are ignored.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules a kWhatDrainAudioQueue message after delayUs, unless a
// drain is already pending, the queues are still being synced, we're
// paused, or audio is offloaded (offload mode is driven by the sink
// callback instead). The _l suffix: callers in offload paths hold
// mLock; the non-offload looper path calls it without mLock (see the
// comment in kWhatDrainAudioQueue above).
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

// Asynchronously notifies the renderer that the audio sink was
// reconfigured; handled in onAudioSinkChanged().
void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}
// Asynchronously switches the renderer out of audio-offload mode;
// handled in onDisableOffloadAudio().
void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

// Arms the "media rendering started" detection: the notification fires
// once BOTH generations captured here are still current when data is
// actually rendered (see notifyIfMediaRenderingStarted()).
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Posts kWhatMediaRenderingStart exactly once per arming: after firing,
// the start generations are set to -1 so the comparison can't match
// again until prepareForMediaRenderingStart() re-arms it.
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// AudioSink callback trampoline (offload mode). Runs on the sink's
// callback thread, NOT the renderer looper; cookie is the Renderer.
// Returns the number of bytes filled for CB_EVENT_FILL_BUFFER, 0 for
// the other events.
// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            // TODO: send this to player.
            break;
        }
    }

    return 0;
}

// Copies queued audio data into the sink's buffer (offload mode only;
// returns 0 otherwise). Runs on the sink callback thread; mLock guards
// mAudioQueue against the looper thread. Also (re-)establishes the
// media/real time anchor from the sink's playback position, posts each
// buffer's notifyConsumed once fully copied, and requests a sink stop
// on EOS via a message (cannot call stop() from the callback itself).
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio()) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            if (mFirstAudioTimeUs == -1) {
                mFirstAudioTimeUs = mediaTimeUs;
            }
            // NOTE(review): this assignment is dead — it is
            // unconditionally overwritten a few lines below once
            // getPosition() succeeds.
            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            // TODO: figure out how to calculate initial latency.
            // Otherwise, the initial time is not correct till the first sample
            // is played.
            mAnchorTimeMediaUs = mFirstAudioTimeUs
                    + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
            mAnchorTimeRealUs = ALooper::GetNowUs();
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Buffer fully consumed: release it back to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (sizeCopied != 0) {
        notifyPosition();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Writes as much queued audio as the sink can currently accept
// (non-offload mode; looper thread). Returns true if the queue still
// has data, so the caller should schedule another drain.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    // NOTE(review): mNumFramesWritten - numFramesPlayed is unsigned
    // arithmetic; assumes written >= played (i.e. counters not reset
    // out of step) — verify against onAudioSinkChanged().
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
              mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            // First write of this buffer: anchor media time to the
            // estimated real time at which this sample will be heard.
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs",
                  mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            // Estimated time until this write is audible: half the sink
            // latency (heuristic, see XXX) plus the playout time of the
            // frames already buffered.
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

// Schedules kWhatDrainVideoQueue for the head video buffer, delayed so
// it fires roughly when the buffer is due for display (immediately for
// EOS or when no anchor exists yet).
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        // In real-time mode "timeUs" is already a wall-clock deadline.
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            // Video-only playback: this buffer establishes the anchor.
            // With audio present the audio path owns the anchor.
            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

// Releases the head video buffer: tells the decoder whether to render
// it ("render" = false when it is more than 40 ms late) and posts the
// relevant notifications. EOS entries forward the final result instead.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    // 40 ms lateness threshold for dropping a frame.
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
              mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs",
              (mFlags & FLAG_REAL_TIME ? realTimeUs :
              (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

    notifyPosition();
}

// One-shot notification that the first video frame was handed off for
// rendering.
void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

// Notifies the owner that one stream reached end-of-stream (or a final
// error) during draining.
void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

// Looper-thread handler for kWhatQueueBuffer: appends the buffer to the
// proper queue, kicks off draining, and — while mSyncQueues is set —
// drops leading audio that starts more than 100 ms before the first
// video frame so both streams begin roughly in sync.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    // While flushing this stream, consume-and-drop instead of queueing.
    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        // mLock protects mAudioQueue against the offload sink callback.
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

// Ends the queue-sync phase and kicks off any drains that were held
// back while mSyncQueues was set. Caller must hold mLock.
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-thread handler for kWhatQueueEOS: appends an EOS entry
// (mBuffer == NULL, carrying finalResult) to the proper queue. An EOS
// arriving on an empty queue while still syncing ends the sync phase.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper-thread handler for kWhatFlush: empties the chosen queue,
// clears the corresponding mFlushing* flag, bumps the queue generation
// (invalidating pending drain messages) and notifies completion.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
        Mutex::Autolock autoLock(mLock);
        syncQueuesDone_l();
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);
        }

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;

        prepareForMediaRenderingStart();
        if (offloadingAudio()) {
            // Offload: reset the sink and the first-timestamp tracker
            // so the anchor is re-derived after the flush.
            mFirstAudioTimeUs = -1;
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    notifyFlushComplete(audio);
}

// Drains a queue, returning every real buffer to the decoder via its
// notifyConsumed message (EOS entries have no buffer to return).
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

// Notifies the owner that the requested flush finished.
void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Returns true (and releases the buffer back to the decoder) when the
// given stream is mid-flush, so incoming buffers/EOS must be dropped
// rather than queued.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// Re-syncs the frames-written counter with the (re-opened) audio sink.
// No-op while offloading, since the offload path doesn't use
// mNumFramesWritten this way.
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

// Clears FLAG_OFFLOAD_AUDIO under mLock (the flag is read by the sink
// callback thread in fillAudioBuffer()).
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
}

// Posts the current playback position (anchor media time extrapolated
// by elapsed real time), rate-limited to one update per
// kMinPositionUpdateDelayUs. No-op until an anchor exists.
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

// Looper-thread pause: cancels pending drains via generation bump,
// re-arms the rendering-start notification and pauses the audio sink.
void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
    }

    // NOTE(review): size() returns size_t but the format uses %d; ALOGV
    // is compiled out by default, yet this is a latent format mismatch.
    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

// Looper-thread resume: restarts the audio sink and re-schedules drains
// for any data queued while paused. mLock is held for the drain posts
// because the audio queue is shared with the offload callback.
void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    Mutex::Autolock autoLock(mLock);
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android