// NuPlayerRenderer.cpp — AOSP revision 259f1624cf7b93ba831af10a616267487601c27f
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <media/stagefright/foundation/ABuffer.h> 24#include <media/stagefright/foundation/ADebug.h> 25#include <media/stagefright/foundation/AMessage.h> 26 27namespace android { 28 29// static 30const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll; 31 32NuPlayer::Renderer::Renderer( 33 const sp<MediaPlayerBase::AudioSink> &sink, 34 const sp<AMessage> ¬ify, 35 uint32_t flags) 36 : mAudioSink(sink), 37 mNotify(notify), 38 mFlags(flags), 39 mNumFramesWritten(0), 40 mDrainAudioQueuePending(false), 41 mDrainVideoQueuePending(false), 42 mAudioQueueGeneration(0), 43 mVideoQueueGeneration(0), 44 mAnchorTimeMediaUs(-1), 45 mAnchorTimeRealUs(-1), 46 mFlushingAudio(false), 47 mFlushingVideo(false), 48 mHasAudio(false), 49 mHasVideo(false), 50 mSyncQueues(false), 51 mPaused(false), 52 mVideoRenderingStarted(false), 53 mVideoRenderingStartGeneration(0), 54 mAudioRenderingStartGeneration(0), 55 mLastPositionUpdateUs(-1ll), 56 mVideoLateByUs(0ll) { 57} 58 59NuPlayer::Renderer::~Renderer() { 60} 61 62void NuPlayer::Renderer::queueBuffer( 63 bool audio, 64 const sp<ABuffer> &buffer, 65 const sp<AMessage> ¬ifyConsumed) { 66 sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); 67 msg->setInt32("audio", 
static_cast<int32_t>(audio)); 68 msg->setBuffer("buffer", buffer); 69 msg->setMessage("notifyConsumed", notifyConsumed); 70 msg->post(); 71} 72 73void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { 74 CHECK_NE(finalResult, (status_t)OK); 75 76 sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); 77 msg->setInt32("audio", static_cast<int32_t>(audio)); 78 msg->setInt32("finalResult", finalResult); 79 msg->post(); 80} 81 82void NuPlayer::Renderer::flush(bool audio) { 83 { 84 Mutex::Autolock autoLock(mFlushLock); 85 if (audio) { 86 CHECK(!mFlushingAudio); 87 mFlushingAudio = true; 88 } else { 89 CHECK(!mFlushingVideo); 90 mFlushingVideo = true; 91 } 92 } 93 94 sp<AMessage> msg = new AMessage(kWhatFlush, id()); 95 msg->setInt32("audio", static_cast<int32_t>(audio)); 96 msg->post(); 97} 98 99void NuPlayer::Renderer::signalTimeDiscontinuity() { 100 // CHECK(mAudioQueue.empty()); 101 // CHECK(mVideoQueue.empty()); 102 mAnchorTimeMediaUs = -1; 103 mAnchorTimeRealUs = -1; 104 mSyncQueues = false; 105} 106 107void NuPlayer::Renderer::pause() { 108 (new AMessage(kWhatPause, id()))->post(); 109} 110 111void NuPlayer::Renderer::resume() { 112 (new AMessage(kWhatResume, id()))->post(); 113} 114 115void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { 116 switch (msg->what()) { 117 case kWhatDrainAudioQueue: 118 { 119 int32_t generation; 120 CHECK(msg->findInt32("generation", &generation)); 121 if (generation != mAudioQueueGeneration) { 122 break; 123 } 124 125 mDrainAudioQueuePending = false; 126 127 if (onDrainAudioQueue()) { 128 uint32_t numFramesPlayed; 129 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), 130 (status_t)OK); 131 132 uint32_t numFramesPendingPlayout = 133 mNumFramesWritten - numFramesPlayed; 134 135 // This is how long the audio sink will have data to 136 // play back. 
137 int64_t delayUs = 138 mAudioSink->msecsPerFrame() 139 * numFramesPendingPlayout * 1000ll; 140 141 // Let's give it more data after about half that time 142 // has elapsed. 143 postDrainAudioQueue(delayUs / 2); 144 } 145 break; 146 } 147 148 case kWhatDrainVideoQueue: 149 { 150 int32_t generation; 151 CHECK(msg->findInt32("generation", &generation)); 152 if (generation != mVideoQueueGeneration) { 153 break; 154 } 155 156 mDrainVideoQueuePending = false; 157 158 onDrainVideoQueue(); 159 160 postDrainVideoQueue(); 161 break; 162 } 163 164 case kWhatQueueBuffer: 165 { 166 onQueueBuffer(msg); 167 break; 168 } 169 170 case kWhatQueueEOS: 171 { 172 onQueueEOS(msg); 173 break; 174 } 175 176 case kWhatFlush: 177 { 178 onFlush(msg); 179 break; 180 } 181 182 case kWhatAudioSinkChanged: 183 { 184 onAudioSinkChanged(); 185 break; 186 } 187 188 case kWhatPause: 189 { 190 onPause(); 191 break; 192 } 193 194 case kWhatResume: 195 { 196 onResume(); 197 break; 198 } 199 200 default: 201 TRESPASS(); 202 break; 203 } 204} 205 206void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) { 207 if (mDrainAudioQueuePending || mSyncQueues || mPaused) { 208 return; 209 } 210 211 if (mAudioQueue.empty()) { 212 return; 213 } 214 215 mDrainAudioQueuePending = true; 216 sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); 217 msg->setInt32("generation", mAudioQueueGeneration); 218 msg->post(delayUs); 219} 220 221void NuPlayer::Renderer::signalAudioSinkChanged() { 222 (new AMessage(kWhatAudioSinkChanged, id()))->post(); 223} 224 225void NuPlayer::Renderer::prepareForMediaRenderingStart() { 226 mAudioRenderingStartGeneration = mAudioQueueGeneration; 227 mVideoRenderingStartGeneration = mVideoQueueGeneration; 228} 229 230void NuPlayer::Renderer::notifyIfMediaRenderingStarted() { 231 if (mVideoRenderingStartGeneration == mVideoQueueGeneration && 232 mAudioRenderingStartGeneration == mAudioQueueGeneration) { 233 mVideoRenderingStartGeneration = -1; 234 mAudioRenderingStartGeneration 
= -1; 235 236 sp<AMessage> notify = mNotify->dup(); 237 notify->setInt32("what", kWhatMediaRenderingStart); 238 notify->post(); 239 } 240} 241 242bool NuPlayer::Renderer::onDrainAudioQueue() { 243 uint32_t numFramesPlayed; 244 if (mAudioSink->getPosition(&numFramesPlayed) != OK) { 245 return false; 246 } 247 248 ssize_t numFramesAvailableToWrite = 249 mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); 250 251#if 0 252 if (numFramesAvailableToWrite == mAudioSink->frameCount()) { 253 ALOGI("audio sink underrun"); 254 } else { 255 ALOGV("audio queue has %d frames left to play", 256 mAudioSink->frameCount() - numFramesAvailableToWrite); 257 } 258#endif 259 260 size_t numBytesAvailableToWrite = 261 numFramesAvailableToWrite * mAudioSink->frameSize(); 262 263 while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) { 264 QueueEntry *entry = &*mAudioQueue.begin(); 265 266 if (entry->mBuffer == NULL) { 267 // EOS 268 269 notifyEOS(true /* audio */, entry->mFinalResult); 270 271 mAudioQueue.erase(mAudioQueue.begin()); 272 entry = NULL; 273 return false; 274 } 275 276 if (entry->mOffset == 0) { 277 int64_t mediaTimeUs; 278 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 279 280 ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 281 282 mAnchorTimeMediaUs = mediaTimeUs; 283 284 uint32_t numFramesPlayed; 285 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); 286 287 uint32_t numFramesPendingPlayout = 288 mNumFramesWritten - numFramesPlayed; 289 290 int64_t realTimeOffsetUs = 291 (mAudioSink->latency() / 2 /* XXX */ 292 + numFramesPendingPlayout 293 * mAudioSink->msecsPerFrame()) * 1000ll; 294 295 // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs); 296 297 mAnchorTimeRealUs = 298 ALooper::GetNowUs() + realTimeOffsetUs; 299 } 300 301 size_t copy = entry->mBuffer->size() - entry->mOffset; 302 if (copy > numBytesAvailableToWrite) { 303 copy = numBytesAvailableToWrite; 304 } 305 306 
CHECK_EQ(mAudioSink->write( 307 entry->mBuffer->data() + entry->mOffset, copy), 308 (ssize_t)copy); 309 310 entry->mOffset += copy; 311 if (entry->mOffset == entry->mBuffer->size()) { 312 entry->mNotifyConsumed->post(); 313 mAudioQueue.erase(mAudioQueue.begin()); 314 315 entry = NULL; 316 } 317 318 numBytesAvailableToWrite -= copy; 319 size_t copiedFrames = copy / mAudioSink->frameSize(); 320 mNumFramesWritten += copiedFrames; 321 322 notifyIfMediaRenderingStarted(); 323 } 324 325 notifyPosition(); 326 327 return !mAudioQueue.empty(); 328} 329 330void NuPlayer::Renderer::postDrainVideoQueue() { 331 if (mDrainVideoQueuePending || mSyncQueues || mPaused) { 332 return; 333 } 334 335 if (mVideoQueue.empty()) { 336 return; 337 } 338 339 QueueEntry &entry = *mVideoQueue.begin(); 340 341 sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); 342 msg->setInt32("generation", mVideoQueueGeneration); 343 344 int64_t delayUs; 345 346 if (entry.mBuffer == NULL) { 347 // EOS doesn't carry a timestamp. 
348 delayUs = 0; 349 } else if (mFlags & FLAG_REAL_TIME) { 350 int64_t mediaTimeUs; 351 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 352 353 delayUs = mediaTimeUs - ALooper::GetNowUs(); 354 } else { 355 int64_t mediaTimeUs; 356 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 357 358 if (mAnchorTimeMediaUs < 0) { 359 delayUs = 0; 360 361 if (!mHasAudio) { 362 mAnchorTimeMediaUs = mediaTimeUs; 363 mAnchorTimeRealUs = ALooper::GetNowUs(); 364 } 365 } else { 366 int64_t realTimeUs = 367 (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs; 368 369 delayUs = realTimeUs - ALooper::GetNowUs(); 370 } 371 } 372 373 msg->post(delayUs); 374 375 mDrainVideoQueuePending = true; 376} 377 378void NuPlayer::Renderer::onDrainVideoQueue() { 379 if (mVideoQueue.empty()) { 380 return; 381 } 382 383 QueueEntry *entry = &*mVideoQueue.begin(); 384 385 if (entry->mBuffer == NULL) { 386 // EOS 387 388 notifyEOS(false /* audio */, entry->mFinalResult); 389 390 mVideoQueue.erase(mVideoQueue.begin()); 391 entry = NULL; 392 393 mVideoLateByUs = 0ll; 394 395 notifyPosition(); 396 return; 397 } 398 399 int64_t realTimeUs; 400 if (mFlags & FLAG_REAL_TIME) { 401 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); 402 } else { 403 int64_t mediaTimeUs; 404 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 405 406 realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs; 407 } 408 409 mVideoLateByUs = ALooper::GetNowUs() - realTimeUs; 410 bool tooLate = (mVideoLateByUs > 40000); 411 412 if (tooLate) { 413 ALOGV("video late by %lld us (%.2f secs)", 414 mVideoLateByUs, mVideoLateByUs / 1E6); 415 } else { 416 ALOGV("rendering video at media time %.2f secs", 417 (mFlags & FLAG_REAL_TIME ? 
realTimeUs : 418 (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); 419 } 420 421 entry->mNotifyConsumed->setInt32("render", !tooLate); 422 entry->mNotifyConsumed->post(); 423 mVideoQueue.erase(mVideoQueue.begin()); 424 entry = NULL; 425 426 if (!mVideoRenderingStarted) { 427 mVideoRenderingStarted = true; 428 notifyVideoRenderingStart(); 429 } 430 431 notifyIfMediaRenderingStarted(); 432 433 notifyPosition(); 434} 435 436void NuPlayer::Renderer::notifyVideoRenderingStart() { 437 sp<AMessage> notify = mNotify->dup(); 438 notify->setInt32("what", kWhatVideoRenderingStart); 439 notify->post(); 440} 441 442void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) { 443 sp<AMessage> notify = mNotify->dup(); 444 notify->setInt32("what", kWhatEOS); 445 notify->setInt32("audio", static_cast<int32_t>(audio)); 446 notify->setInt32("finalResult", finalResult); 447 notify->post(); 448} 449 450void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { 451 int32_t audio; 452 CHECK(msg->findInt32("audio", &audio)); 453 454 if (audio) { 455 mHasAudio = true; 456 } else { 457 mHasVideo = true; 458 } 459 460 if (dropBufferWhileFlushing(audio, msg)) { 461 return; 462 } 463 464 sp<ABuffer> buffer; 465 CHECK(msg->findBuffer("buffer", &buffer)); 466 467 sp<AMessage> notifyConsumed; 468 CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed)); 469 470 QueueEntry entry; 471 entry.mBuffer = buffer; 472 entry.mNotifyConsumed = notifyConsumed; 473 entry.mOffset = 0; 474 entry.mFinalResult = OK; 475 476 if (audio) { 477 mAudioQueue.push_back(entry); 478 postDrainAudioQueue(); 479 } else { 480 mVideoQueue.push_back(entry); 481 postDrainVideoQueue(); 482 } 483 484 if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) { 485 return; 486 } 487 488 sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer; 489 sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer; 490 491 if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) { 492 // EOS 
signalled on either queue. 493 syncQueuesDone(); 494 return; 495 } 496 497 int64_t firstAudioTimeUs; 498 int64_t firstVideoTimeUs; 499 CHECK(firstAudioBuffer->meta() 500 ->findInt64("timeUs", &firstAudioTimeUs)); 501 CHECK(firstVideoBuffer->meta() 502 ->findInt64("timeUs", &firstVideoTimeUs)); 503 504 int64_t diff = firstVideoTimeUs - firstAudioTimeUs; 505 506 ALOGV("queueDiff = %.2f secs", diff / 1E6); 507 508 if (diff > 100000ll) { 509 // Audio data starts More than 0.1 secs before video. 510 // Drop some audio. 511 512 (*mAudioQueue.begin()).mNotifyConsumed->post(); 513 mAudioQueue.erase(mAudioQueue.begin()); 514 return; 515 } 516 517 syncQueuesDone(); 518} 519 520void NuPlayer::Renderer::syncQueuesDone() { 521 if (!mSyncQueues) { 522 return; 523 } 524 525 mSyncQueues = false; 526 527 if (!mAudioQueue.empty()) { 528 postDrainAudioQueue(); 529 } 530 531 if (!mVideoQueue.empty()) { 532 postDrainVideoQueue(); 533 } 534} 535 536void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { 537 int32_t audio; 538 CHECK(msg->findInt32("audio", &audio)); 539 540 if (dropBufferWhileFlushing(audio, msg)) { 541 return; 542 } 543 544 int32_t finalResult; 545 CHECK(msg->findInt32("finalResult", &finalResult)); 546 547 QueueEntry entry; 548 entry.mOffset = 0; 549 entry.mFinalResult = finalResult; 550 551 if (audio) { 552 if (mAudioQueue.empty() && mSyncQueues) { 553 syncQueuesDone(); 554 } 555 mAudioQueue.push_back(entry); 556 postDrainAudioQueue(); 557 } else { 558 if (mVideoQueue.empty() && mSyncQueues) { 559 syncQueuesDone(); 560 } 561 mVideoQueue.push_back(entry); 562 postDrainVideoQueue(); 563 } 564} 565 566void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { 567 int32_t audio; 568 CHECK(msg->findInt32("audio", &audio)); 569 570 // If we're currently syncing the queues, i.e. 
dropping audio while 571 // aligning the first audio/video buffer times and only one of the 572 // two queues has data, we may starve that queue by not requesting 573 // more buffers from the decoder. If the other source then encounters 574 // a discontinuity that leads to flushing, we'll never find the 575 // corresponding discontinuity on the other queue. 576 // Therefore we'll stop syncing the queues if at least one of them 577 // is flushed. 578 syncQueuesDone(); 579 580 ALOGV("flushing %s", audio ? "audio" : "video"); 581 if (audio) { 582 flushQueue(&mAudioQueue); 583 584 Mutex::Autolock autoLock(mFlushLock); 585 mFlushingAudio = false; 586 587 mDrainAudioQueuePending = false; 588 ++mAudioQueueGeneration; 589 590 prepareForMediaRenderingStart(); 591 } else { 592 flushQueue(&mVideoQueue); 593 594 Mutex::Autolock autoLock(mFlushLock); 595 mFlushingVideo = false; 596 597 mDrainVideoQueuePending = false; 598 ++mVideoQueueGeneration; 599 600 prepareForMediaRenderingStart(); 601 } 602 603 notifyFlushComplete(audio); 604} 605 606void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) { 607 while (!queue->empty()) { 608 QueueEntry *entry = &*queue->begin(); 609 610 if (entry->mBuffer != NULL) { 611 entry->mNotifyConsumed->post(); 612 } 613 614 queue->erase(queue->begin()); 615 entry = NULL; 616 } 617} 618 619void NuPlayer::Renderer::notifyFlushComplete(bool audio) { 620 sp<AMessage> notify = mNotify->dup(); 621 notify->setInt32("what", kWhatFlushComplete); 622 notify->setInt32("audio", static_cast<int32_t>(audio)); 623 notify->post(); 624} 625 626bool NuPlayer::Renderer::dropBufferWhileFlushing( 627 bool audio, const sp<AMessage> &msg) { 628 bool flushing = false; 629 630 { 631 Mutex::Autolock autoLock(mFlushLock); 632 if (audio) { 633 flushing = mFlushingAudio; 634 } else { 635 flushing = mFlushingVideo; 636 } 637 } 638 639 if (!flushing) { 640 return false; 641 } 642 643 sp<AMessage> notifyConsumed; 644 if (msg->findMessage("notifyConsumed", ¬ifyConsumed)) { 
645 notifyConsumed->post(); 646 } 647 648 return true; 649} 650 651void NuPlayer::Renderer::onAudioSinkChanged() { 652 CHECK(!mDrainAudioQueuePending); 653 mNumFramesWritten = 0; 654 uint32_t written; 655 if (mAudioSink->getFramesWritten(&written) == OK) { 656 mNumFramesWritten = written; 657 } 658} 659 660void NuPlayer::Renderer::notifyPosition() { 661 if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) { 662 return; 663 } 664 665 int64_t nowUs = ALooper::GetNowUs(); 666 667 if (mLastPositionUpdateUs >= 0 668 && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) { 669 return; 670 } 671 mLastPositionUpdateUs = nowUs; 672 673 int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; 674 675 sp<AMessage> notify = mNotify->dup(); 676 notify->setInt32("what", kWhatPosition); 677 notify->setInt64("positionUs", positionUs); 678 notify->setInt64("videoLateByUs", mVideoLateByUs); 679 notify->post(); 680} 681 682void NuPlayer::Renderer::onPause() { 683 CHECK(!mPaused); 684 685 mDrainAudioQueuePending = false; 686 ++mAudioQueueGeneration; 687 688 mDrainVideoQueuePending = false; 689 ++mVideoQueueGeneration; 690 691 prepareForMediaRenderingStart(); 692 693 if (mHasAudio) { 694 mAudioSink->pause(); 695 } 696 697 ALOGV("now paused audio queue has %d entries, video has %d entries", 698 mAudioQueue.size(), mVideoQueue.size()); 699 700 mPaused = true; 701} 702 703void NuPlayer::Renderer::onResume() { 704 if (!mPaused) { 705 return; 706 } 707 708 if (mHasAudio) { 709 mAudioSink->start(); 710 } 711 712 mPaused = false; 713 714 if (!mAudioQueue.empty()) { 715 postDrainAudioQueue(); 716 } 717 718 if (!mVideoQueue.empty()) { 719 postDrainVideoQueue(); 720 } 721} 722 723} // namespace android 724 725