NuPlayerRenderer.cpp revision 231406d597cca1c9c009f870fbb62e46b8475186
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <media/stagefright/foundation/ABuffer.h> 24#include <media/stagefright/foundation/ADebug.h> 25#include <media/stagefright/foundation/AMessage.h> 26#include <media/stagefright/foundation/AUtils.h> 27#include <media/stagefright/MediaErrors.h> 28#include <media/stagefright/MetaData.h> 29#include <media/stagefright/Utils.h> 30 31#include <VideoFrameScheduler.h> 32 33#include <inttypes.h> 34 35namespace android { 36 37// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink 38// is closed to allow the audio DSP to power down. 
// Maximum pause duration tolerated while offloading before the sink is torn
// down (value referenced by startAudioOffloadPauseTimeout outside this view).
static const int64_t kOffloadPauseMaxUs = 10000000ll;

// static
// All-invalid PCM sink description; used as the "sink closed / unknown" state.
const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
        AUDIO_CHANNEL_NONE,
        AUDIO_OUTPUT_FLAG_NONE,
        AUDIO_FORMAT_INVALID,
        0, // mNumChannels
        0 // mSampleRate
};

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// Constructor only records the sink/notify target/flags and zero-initializes
// all bookkeeping; no messages are posted and no audio is opened here.
// All *TimeUs members use -1 as the "unset" sentinel.
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mAnchorNumFramesWritten(-1),
      mAnchorMaxMediaUs(-1),
      mVideoLateByUs(0ll),
      mHasAudio(false),
      mHasVideo(false),
      mPauseStartedTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mNotifyCompleteAudio(false),
      mNotifyCompleteVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mPausePositionMediaTimeUs(-1),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0) {
}

// Destructor: only the offloaded sink is torn down here; in non-offload mode
// the sink's lifetime is managed by the owner of mAudioSink.
NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

// Queues one decoded access unit. Safe to call from any thread: the entry is
// marshalled to the renderer's looper via kWhatQueueBuffer (handled by
// onQueueBuffer). notifyConsumed is posted back once the buffer is drained.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Queues an end-of-stream marker for one stream. finalResult must be an
// actual error code (ERROR_END_OF_STREAM for normal EOS), never OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Initiates an asynchronous flush of one queue. mFlushLock guards the
// "flush in flight" flags so a second flush request before onFlush runs
// only accumulates the notifyComplete bit instead of posting twice.
void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mNotifyCompleteAudio |= notifyComplete;
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            mNotifyCompleteVideo |= notifyComplete;
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets all time anchors after a discontinuity (e.g. seek). Runs on the
// caller's thread under mLock; the queues are expected to be drained/flushed
// by this point (see the retained CHECKs below).
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    setAudioFirstAnchorTime(-1);
    setAnchorTime(-1, -1);
    setVideoLateByUs(0);
    mSyncQueues = false;
}

// The following signal*/pause/resume helpers just bounce a message onto the
// renderer's looper so the real work happens single-threaded in
// onMessageReceived.
void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::signalEnableOffloadAudio() {
    (new AMessage(kWhatEnableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Forwards the (detected or container-declared) video frame rate to the
// VideoFrameScheduler via the looper.
void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
    msg->setFloat("frame-rate", fps);
    msg->post();
}

// Called on any threads, except renderer's thread.
181status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) { 182 { 183 Mutex::Autolock autoLock(mLock); 184 int64_t currentPositionUs; 185 if (getCurrentPositionIfPaused_l(¤tPositionUs)) { 186 *mediaUs = currentPositionUs; 187 return OK; 188 } 189 } 190 return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs()); 191} 192 193// Called on only renderer's thread. 194status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) { 195 return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs()); 196} 197 198// Called on only renderer's thread. 199// Since mPaused and mPausePositionMediaTimeUs are changed only on renderer's 200// thread, no need to acquire mLock. 201status_t NuPlayer::Renderer::getCurrentPositionOnLooper( 202 int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) { 203 int64_t currentPositionUs; 204 if (getCurrentPositionIfPaused_l(¤tPositionUs)) { 205 *mediaUs = currentPositionUs; 206 return OK; 207 } 208 return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo); 209} 210 211// Called either with mLock acquired or on renderer's thread. 212bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) { 213 if (!mPaused || mPausePositionMediaTimeUs < 0ll) { 214 return false; 215 } 216 *mediaUs = mPausePositionMediaTimeUs; 217 return true; 218} 219 220// Called on any threads. 
// Extrapolates the current media position from the last (mediaUs, realUs)
// anchor pair. Thread-safe via mTimeLock. Returns NO_INIT until the first
// anchor has been established.
status_t NuPlayer::Renderer::getCurrentPositionFromAnchor(
        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
    Mutex::Autolock autoLock(mTimeLock);
    if (!mHasAudio && !mHasVideo) {
        return NO_INIT;
    }

    if (mAnchorTimeMediaUs < 0) {
        return NO_INIT;
    }

    // Position = anchor media time plus real time elapsed since the anchor.
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    // While paused the clock must not advance, so subtract the paused span.
    if (mPauseStartedTimeRealUs != -1) {
        positionUs -= (nowUs - mPauseStartedTimeRealUs);
    }

    // limit position to the last queued media time (for video only stream
    // position will be discrete as we don't know how long each frame lasts)
    if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
        if (positionUs > mAnchorMaxMediaUs) {
            positionUs = mAnchorMaxMediaUs;
        }
    }

    // Never report a position before the first audio sample's timestamp.
    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
        positionUs = mAudioFirstAnchorTimeMediaUs;
    }

    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
    return OK;
}

// Latches "this session has audio/video"; flags are only ever set, never
// cleared, for the lifetime of the renderer.
void NuPlayer::Renderer::setHasMedia(bool audio) {
    Mutex::Autolock autoLock(mTimeLock);
    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }
}

void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mAudioFirstAnchorTimeMediaUs = mediaUs;
}

// Records the first audio timestamp exactly once (-1 means unset).
void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
    }
}

// Establishes a new (media, real) anchor pair used by
// getCurrentPositionFromAnchor(). resume == true also clears the
// pause-start marker so the paused span stops being subtracted.
void NuPlayer::Renderer::setAnchorTime(
        int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
    Mutex::Autolock autoLock(mTimeLock);
    mAnchorTimeMediaUs = mediaUs;
    mAnchorTimeRealUs = realUs;
    mAnchorNumFramesWritten = numFramesWritten;
    if (resume) {
        mPauseStartedTimeRealUs = -1;
    }
}

void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mTimeLock);
    return mVideoLateByUs;
}

void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mPauseStartedTimeRealUs = realUs;
}

// Synchronously opens the audio sink on the renderer's thread
// (postAndAwaitResponse blocks the caller). On success *isOffloaded reports
// whether the sink ended up in offload mode.
status_t NuPlayer::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        bool *isOffloaded) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
    msg->setMessage("format", format);
    msg->setInt32("offload-only", offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);

    int32_t err;
    if (!response->findInt32("err", &err)) {
        err = INVALID_OPERATION;
    } else if (err == OK && isOffloaded != NULL) {
        int32_t offload;
        CHECK(response->findInt32("offload", &offload));
        *isOffloaded = (offload != 0);
    }
    return err;
}

// Synchronously closes the audio sink on the renderer's thread.
void NuPlayer::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

// Central dispatch: every public entry point funnels here so that all state
// mutation happens on the renderer's looper thread.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->setInt32("offload", offloadingAudio());

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            // Stale-generation messages (from before a flush/pause) are
            // dropped: the generation counter was bumped when they became
            // irrelevant.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            Mutex::Autolock autoLock(mLock);
            postDrainVideoQueue_l();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            // Re-arm variant of kWhatDrainVideoQueue: schedules the next
            // drain without rendering a frame now (used after a suspected
            // media-time jump, see postDrainVideoQueue_l).
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;
            Mutex::Autolock autoLock(mLock);
            postDrainVideoQueue_l();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatEnableOffloadAudio:
        {
            onEnableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

void
NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    // No-op while a drain is already scheduled, while A/V start sync is in
    // progress, while paused, or in offload mode (offload pulls data via
    // fillAudioBuffer instead of push-draining).
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

// Arms the "rendering started" edge detector: notifyIfMediaRenderingStarted
// fires only while both generations still match the values latched here.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Sends kWhatMediaRenderingStart exactly once per prepare cycle; the
// generations are set to -1 so subsequent calls are no-ops.
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
// AudioSink callback trampoline for offload mode; runs on the audio
// callback thread, not the renderer's looper. cookie is the Renderer*.
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

// Pull-model drain used in offload mode: copies queued PCM/compressed data
// into the sink-provided buffer. Runs on the audio callback thread under
// mLock. Returns the number of bytes actually copied (0 pauses the pull).
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Only the first, untouched entry of this callback establishes the
        // first-audio anchor time.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        // Release the entry back to the decoder once fully consumed.
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    // Re-anchor on every fill: real time "now" minus what has already been
    // played out maps back to the first audio timestamp.
    if (mAudioFirstAnchorTimeMediaUs >= 0) {
        int64_t nowUs = ALooper::GetNowUs();
        setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
    }

    // we don't know how much data we are queueing for offloaded tracks
    mAnchorMaxMediaUs = -1;

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Push-model drain for non-offload mode; runs on the renderer's thread.
// Writes as much queued audio as the sink will accept. Returns true if
// the queue still has entries (caller reschedules the drain).
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            if (mAudioSink->needsTrailingPadding()) {
                // If we're not in gapless playback (i.e. through setNextPlayer), we
                // need to stop the track here, because that will play out the last
                // little bit at the end of the file. Otherwise short files won't play.
                mAudioSink->stop();
                mNumFramesWritten = 0;
            }
            return false;
        }

        // First write of a new entry carries its timestamp: update anchors.
        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
            ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
            break;
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            //     1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            //     2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    // Upper bound for reported positions: media time of everything written
    // so far beyond the current anchor.
    mAnchorMaxMediaUs =
        mAnchorTimeMediaUs +
                (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
                        * 1000LL * mAudioSink->msecsPerFrame());

    return !mAudioQueue.empty();
}

// Duration of audio written to the sink but not yet played out.
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

// Maps a media timestamp to the real (wall-clock) time it should render at.
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t currentPositionUs;
    if (mPaused || getCurrentPositionOnLooper(
            &currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
        // If failed to get current position, e.g. due to audio clock is not ready, then just
        // play out video immediately without delay.
        return nowUs;
    }
    return (mediaTimeUs - currentPositionUs) + nowUs;
}

void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
    int64_t nowUs = ALooper::GetNowUs();
    // The new timestamp renders once pending (already-written) audio drains.
    setAnchorTime(
            mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
}

// Schedules the next video drain; caller must hold mLock.
void NuPlayer::Renderer::postDrainVideoQueue_l() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
810 msg->post(); 811 mDrainVideoQueuePending = true; 812 return; 813 } 814 815 int64_t delayUs; 816 int64_t nowUs = ALooper::GetNowUs(); 817 int64_t realTimeUs; 818 if (mFlags & FLAG_REAL_TIME) { 819 int64_t mediaTimeUs; 820 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 821 realTimeUs = mediaTimeUs; 822 } else { 823 int64_t mediaTimeUs; 824 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 825 826 if (mAnchorTimeMediaUs < 0) { 827 setAnchorTime(mediaTimeUs, nowUs); 828 mPausePositionMediaTimeUs = mediaTimeUs; 829 mAnchorMaxMediaUs = mediaTimeUs; 830 realTimeUs = nowUs; 831 } else { 832 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); 833 } 834 if (!mHasAudio) { 835 mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps 836 } 837 838 // Heuristics to handle situation when media time changed without a 839 // discontinuity. If we have not drained an audio buffer that was 840 // received after this buffer, repost in 10 msec. Otherwise repost 841 // in 500 msec. 842 delayUs = realTimeUs - nowUs; 843 if (delayUs > 500000) { 844 int64_t postDelayUs = 500000; 845 if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) { 846 postDelayUs = 10000; 847 } 848 msg->setWhat(kWhatPostDrainVideoQueue); 849 msg->post(postDelayUs); 850 mVideoScheduler->restart(); 851 ALOGI("possible video time jump of %dms, retrying in %dms", 852 (int)(delayUs / 1000), (int)(postDelayUs / 1000)); 853 mDrainVideoQueuePending = true; 854 return; 855 } 856 } 857 858 realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000; 859 int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000); 860 861 delayUs = realTimeUs - nowUs; 862 863 ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs); 864 // post 2 display refreshes before rendering is due 865 msg->post(delayUs > twoVsyncsUs ? 
delayUs - twoVsyncsUs : 0); 866 867 mDrainVideoQueuePending = true; 868} 869 870void NuPlayer::Renderer::onDrainVideoQueue() { 871 if (mVideoQueue.empty()) { 872 return; 873 } 874 875 QueueEntry *entry = &*mVideoQueue.begin(); 876 877 if (entry->mBuffer == NULL) { 878 // EOS 879 880 notifyEOS(false /* audio */, entry->mFinalResult); 881 882 mVideoQueue.erase(mVideoQueue.begin()); 883 entry = NULL; 884 885 setVideoLateByUs(0); 886 return; 887 } 888 889 int64_t nowUs = -1; 890 int64_t realTimeUs; 891 if (mFlags & FLAG_REAL_TIME) { 892 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); 893 } else { 894 int64_t mediaTimeUs; 895 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 896 897 nowUs = ALooper::GetNowUs(); 898 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); 899 } 900 901 bool tooLate = false; 902 903 if (!mPaused) { 904 if (nowUs == -1) { 905 nowUs = ALooper::GetNowUs(); 906 } 907 setVideoLateByUs(nowUs - realTimeUs); 908 tooLate = (mVideoLateByUs > 40000); 909 910 if (tooLate) { 911 ALOGV("video late by %lld us (%.2f secs)", 912 mVideoLateByUs, mVideoLateByUs / 1E6); 913 } else { 914 ALOGV("rendering video at media time %.2f secs", 915 (mFlags & FLAG_REAL_TIME ? realTimeUs : 916 (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); 917 } 918 } else { 919 setVideoLateByUs(0); 920 if (!mVideoSampleReceived && !mHasAudio) { 921 // This will ensure that the first frame after a flush won't be used as anchor 922 // when renderer is in paused state, because resume can happen any time after seek. 
            setAnchorTime(-1, -1);
        }
    }

    // Hand the frame back to the consumer with the render decision; a
    // dropped (tooLate) frame is released without being displayed.
    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }
}

// One-shot upcall the first time a video frame is actually rendered.
void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

// Reports end-of-stream for one stream; delayUs defers the notification
// (used to let pending audio play out first).
void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

// Called from the audio callback thread; bounces the teardown request onto
// the renderer's looper.
void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

// Looper-side handler for queueBuffer(): enqueues the entry and, when
// mSyncQueues is set, aligns the initial audio/video start times by
// dropping leading audio.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    setHasMedia(audio);

    // Lazily create the frame scheduler once video is known to exist.
    if (mHasVideo) {
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;
    entry.mBufferOrdinal = ++mTotalBuffersQueued;

    Mutex::Autolock autoLock(mLock);
    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue_l();
    }

    // Start-time alignment below only proceeds once both queues have data.
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

// Ends start-time alignment and kicks both drains; caller must hold mLock.
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue_l();
    }
}

// Looper-side handler for queueEOS(): enqueues an entry with a NULL buffer
// carrying the final status.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    Mutex::Autolock autoLock(mLock);
    if (audio) {
        // An empty queue during sync would deadlock the alignment; end it.
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue_l();
    }
}

// Looper-side flush handler: clears the in-flight flag set by flush(),
// empties the requested queue and resets anchors.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio, notifyComplete;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
            notifyComplete = mNotifyCompleteAudio;
            mNotifyCompleteAudio = false;
        } else {
            mFlushingVideo = false;
            notifyComplete = mNotifyCompleteVideo;
            mNotifyCompleteVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
         setPauseStartedTimeRealUs(-1);
         setAnchorTime(-1, -1);
    }

    ALOGV("flushing %s", audio ?
"audio" : "video"); 1117 if (audio) { 1118 { 1119 Mutex::Autolock autoLock(mLock); 1120 flushQueue(&mAudioQueue); 1121 1122 ++mAudioQueueGeneration; 1123 prepareForMediaRenderingStart(); 1124 1125 if (offloadingAudio()) { 1126 setAudioFirstAnchorTime(-1); 1127 } 1128 } 1129 1130 mDrainAudioQueuePending = false; 1131 1132 if (offloadingAudio()) { 1133 mAudioSink->pause(); 1134 mAudioSink->flush(); 1135 mAudioSink->start(); 1136 } 1137 } else { 1138 flushQueue(&mVideoQueue); 1139 1140 mDrainVideoQueuePending = false; 1141 ++mVideoQueueGeneration; 1142 1143 if (mVideoScheduler != NULL) { 1144 mVideoScheduler->restart(); 1145 } 1146 1147 prepareForMediaRenderingStart(); 1148 } 1149 1150 mVideoSampleReceived = false; 1151 1152 if (notifyComplete) { 1153 notifyFlushComplete(audio); 1154 } 1155} 1156 1157void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) { 1158 while (!queue->empty()) { 1159 QueueEntry *entry = &*queue->begin(); 1160 1161 if (entry->mBuffer != NULL) { 1162 entry->mNotifyConsumed->post(); 1163 } 1164 1165 queue->erase(queue->begin()); 1166 entry = NULL; 1167 } 1168} 1169 1170void NuPlayer::Renderer::notifyFlushComplete(bool audio) { 1171 sp<AMessage> notify = mNotify->dup(); 1172 notify->setInt32("what", kWhatFlushComplete); 1173 notify->setInt32("audio", static_cast<int32_t>(audio)); 1174 notify->post(); 1175} 1176 1177bool NuPlayer::Renderer::dropBufferWhileFlushing( 1178 bool audio, const sp<AMessage> &msg) { 1179 bool flushing = false; 1180 1181 { 1182 Mutex::Autolock autoLock(mFlushLock); 1183 if (audio) { 1184 flushing = mFlushingAudio; 1185 } else { 1186 flushing = mFlushingVideo; 1187 } 1188 } 1189 1190 if (!flushing) { 1191 return false; 1192 } 1193 1194 sp<AMessage> notifyConsumed; 1195 if (msg->findMessage("notifyConsumed", ¬ifyConsumed)) { 1196 notifyConsumed->post(); 1197 } 1198 1199 return true; 1200} 1201 1202void NuPlayer::Renderer::onAudioSinkChanged() { 1203 if (offloadingAudio()) { 1204 return; 1205 } 1206 
CHECK(!mDrainAudioQueuePending); 1207 mNumFramesWritten = 0; 1208 mAnchorNumFramesWritten = -1; 1209 uint32_t written; 1210 if (mAudioSink->getFramesWritten(&written) == OK) { 1211 mNumFramesWritten = written; 1212 } 1213} 1214 1215void NuPlayer::Renderer::onDisableOffloadAudio() { 1216 Mutex::Autolock autoLock(mLock); 1217 mFlags &= ~FLAG_OFFLOAD_AUDIO; 1218 ++mAudioQueueGeneration; 1219} 1220 1221void NuPlayer::Renderer::onEnableOffloadAudio() { 1222 Mutex::Autolock autoLock(mLock); 1223 mFlags |= FLAG_OFFLOAD_AUDIO; 1224 ++mAudioQueueGeneration; 1225} 1226 1227void NuPlayer::Renderer::onPause() { 1228 if (mPaused) { 1229 ALOGW("Renderer::onPause() called while already paused!"); 1230 return; 1231 } 1232 int64_t currentPositionUs; 1233 int64_t pausePositionMediaTimeUs; 1234 if (getCurrentPositionFromAnchor( 1235 ¤tPositionUs, ALooper::GetNowUs()) == OK) { 1236 pausePositionMediaTimeUs = currentPositionUs; 1237 } else { 1238 // Set paused position to -1 (unavailabe) if we don't have anchor time 1239 // This could happen if client does a seekTo() immediately followed by 1240 // pause(). Renderer will be flushed with anchor time cleared. We don't 1241 // want to leave stale value in mPausePositionMediaTimeUs. 
1242 pausePositionMediaTimeUs = -1; 1243 } 1244 { 1245 Mutex::Autolock autoLock(mLock); 1246 mPausePositionMediaTimeUs = pausePositionMediaTimeUs; 1247 ++mAudioQueueGeneration; 1248 ++mVideoQueueGeneration; 1249 prepareForMediaRenderingStart(); 1250 mPaused = true; 1251 setPauseStartedTimeRealUs(ALooper::GetNowUs()); 1252 } 1253 1254 mDrainAudioQueuePending = false; 1255 mDrainVideoQueuePending = false; 1256 1257 if (mHasAudio) { 1258 mAudioSink->pause(); 1259 startAudioOffloadPauseTimeout(); 1260 } 1261 1262 ALOGV("now paused audio queue has %d entries, video has %d entries", 1263 mAudioQueue.size(), mVideoQueue.size()); 1264} 1265 1266void NuPlayer::Renderer::onResume() { 1267 if (!mPaused) { 1268 return; 1269 } 1270 1271 if (mHasAudio) { 1272 cancelAudioOffloadPauseTimeout(); 1273 mAudioSink->start(); 1274 } 1275 1276 Mutex::Autolock autoLock(mLock); 1277 mPaused = false; 1278 if (mPauseStartedTimeRealUs != -1) { 1279 int64_t newAnchorRealUs = 1280 mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs; 1281 setAnchorTime( 1282 mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */); 1283 } 1284 1285 if (!mAudioQueue.empty()) { 1286 postDrainAudioQueue_l(); 1287 } 1288 1289 if (!mVideoQueue.empty()) { 1290 postDrainVideoQueue_l(); 1291 } 1292} 1293 1294void NuPlayer::Renderer::onSetVideoFrameRate(float fps) { 1295 if (mVideoScheduler == NULL) { 1296 mVideoScheduler = new VideoFrameScheduler(); 1297 } 1298 mVideoScheduler->init(fps); 1299} 1300 1301// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs() 1302// as it acquires locks and may query the audio driver. 1303// 1304// Some calls could conceivably retrieve extrapolated data instead of 1305// accessing getTimestamp() or getPosition() every time a data buffer with 1306// a media time is received. 
1307// 1308int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { 1309 uint32_t numFramesPlayed; 1310 int64_t numFramesPlayedAt; 1311 AudioTimestamp ts; 1312 static const int64_t kStaleTimestamp100ms = 100000; 1313 1314 status_t res = mAudioSink->getTimestamp(ts); 1315 if (res == OK) { // case 1: mixing audio tracks and offloaded tracks. 1316 numFramesPlayed = ts.mPosition; 1317 numFramesPlayedAt = 1318 ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; 1319 const int64_t timestampAge = nowUs - numFramesPlayedAt; 1320 if (timestampAge > kStaleTimestamp100ms) { 1321 // This is an audio FIXME. 1322 // getTimestamp returns a timestamp which may come from audio mixing threads. 1323 // After pausing, the MixerThread may go idle, thus the mTime estimate may 1324 // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms, 1325 // the max latency should be about 25ms with an average around 12ms (to be verified). 1326 // For safety we use 100ms. 1327 ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)", 1328 (long long)nowUs, (long long)numFramesPlayedAt); 1329 numFramesPlayedAt = nowUs - kStaleTimestamp100ms; 1330 } 1331 //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt); 1332 } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track 1333 numFramesPlayed = 0; 1334 numFramesPlayedAt = nowUs; 1335 //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", 1336 // numFramesPlayed, (long long)numFramesPlayedAt); 1337 } else { // case 3: transitory at new track or audio fast tracks. 1338 res = mAudioSink->getPosition(&numFramesPlayed); 1339 CHECK_EQ(res, (status_t)OK); 1340 numFramesPlayedAt = nowUs; 1341 numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */ 1342 //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); 1343 } 1344 1345 // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. 
1346 //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test 1347 int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()) 1348 + nowUs - numFramesPlayedAt; 1349 if (durationUs < 0) { 1350 // Occurs when numFramesPlayed position is very small and the following: 1351 // (1) In case 1, the time nowUs is computed before getTimestamp() is called and 1352 // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed. 1353 // (2) In case 3, using getPosition and adding mAudioSink->latency() to 1354 // numFramesPlayedAt, by a time amount greater than numFramesPlayed. 1355 // 1356 // Both of these are transitory conditions. 1357 ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs); 1358 durationUs = 0; 1359 } 1360 ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)", 1361 (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt); 1362 return durationUs; 1363} 1364 1365void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) { 1366 if (mAudioOffloadTornDown) { 1367 return; 1368 } 1369 mAudioOffloadTornDown = true; 1370 1371 int64_t currentPositionUs; 1372 if (getCurrentPositionOnLooper(¤tPositionUs) != OK) { 1373 currentPositionUs = 0; 1374 } 1375 1376 mAudioSink->stop(); 1377 mAudioSink->flush(); 1378 1379 sp<AMessage> notify = mNotify->dup(); 1380 notify->setInt32("what", kWhatAudioOffloadTearDown); 1381 notify->setInt64("positionUs", currentPositionUs); 1382 notify->setInt32("reason", reason); 1383 notify->post(); 1384} 1385 1386void NuPlayer::Renderer::startAudioOffloadPauseTimeout() { 1387 if (offloadingAudio()) { 1388 sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id()); 1389 msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration); 1390 msg->post(kOffloadPauseMaxUs); 1391 } 1392} 1393 1394void 
NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
    // Bumping the generation invalidates any pause-timeout message already
    // posted by startAudioOffloadPauseTimeout().
    if (offloadingAudio()) {
        ++mAudioOffloadPauseTimeoutGeneration;
    }
}

// Opens (or re-opens) the AudioSink for |format|. When offloading is enabled
// it first tries a compressed-offload open; on any failure it disables
// offload and (unless |offloadOnly|) falls back to a 16-bit PCM sink.
// Returns OK, or the sink open() error from the PCM path.
status_t NuPlayer::Renderer::onOpenAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
            offloadOnly, offloadingAudio());
    bool audioSinkChanged = false;

    int32_t numChannels;
    CHECK(format->findInt32("channel-count", &numChannels));

    int32_t channelMask;
    if (!format->findInt32("channel-mask", &channelMask)) {
        // signal to the AudioSink to derive the mask from count.
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    int32_t sampleRate;
    CHECK(format->findInt32("sample-rate", &sampleRate));

    if (offloadingAudio()) {
        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
        AString mime;
        CHECK(format->findString("mime", &mime));
        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());

        if (err != OK) {
            ALOGE("Couldn't map mime \"%s\" to a valid "
                    "audio_format", mime.c_str());
            onDisableOffloadAudio();
        } else {
            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                    mime.c_str(), audioFormat);

            int avgBitRate = -1;
            format->findInt32("bit-rate", &avgBitRate);

            int32_t aacProfile = -1;
            if (audioFormat == AUDIO_FORMAT_AAC
                    && format->findInt32("aac-profile", &aacProfile)) {
                // Redefine AAC format as per aac profile
                mapAACProfileToAudioFormat(
                        audioFormat,
                        aacProfile);
            }

            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.duration_us = -1;
            format->findInt64(
                    "durationUs", &offloadInfo.duration_us);
            offloadInfo.sample_rate = sampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = hasVideo;
            offloadInfo.is_streaming = true;

            // NOTE(review): byte-wise compare assumes padding bytes agree;
            // holds here because both sides start from AUDIO_INFO_INITIALIZER
            // copies — confirm before changing how either struct is built.
            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                ALOGV("openAudioSink: no change in offload mode");
                // no change from previous configuration, everything ok.
                return OK;
            }
            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;

            ALOGV("openAudioSink: try to open AudioSink in offload mode");
            uint32_t offloadFlags = flags;
            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
            audioSinkChanged = true;
            mAudioSink->close();
            err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    audioFormat,
                    8 /* bufferCount */,
                    &NuPlayer::Renderer::AudioSinkCallback,
                    this,
                    (audio_output_flags_t)offloadFlags,
                    &offloadInfo);

            if (err == OK) {
                // If the playback is offloaded to h/w, we pass
                // the HAL some metadata information.
                // We don't want to do this for PCM because it
                // will be going through the AudioFlinger mixer
                // before reaching the hardware.
                // TODO
                mCurrentOffloadInfo = offloadInfo;
                err = mAudioSink->start();
                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
            }
            if (err != OK) {
                // Clean up, fall back to non offload mode.
                mAudioSink->close();
                onDisableOffloadAudio();
                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                ALOGV("openAudioSink: offload failed");
            }
        }
    }
    // Reached either when offload is off, or when the offload open above
    // failed (onDisableOffloadAudio() cleared the flag).
    if (!offloadOnly && !offloadingAudio()) {
        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
        uint32_t pcmFlags = flags;
        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;

        const PcmInfo info = {
                (audio_channel_mask_t)channelMask,
                (audio_output_flags_t)pcmFlags,
                AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
                numChannels,
                sampleRate
        };
        if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
            ALOGV("openAudioSink: no change in pcm mode");
            // no change from previous configuration, everything ok.
            return OK;
        }

        audioSinkChanged = true;
        mAudioSink->close();
        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
        status_t err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    AUDIO_FORMAT_PCM_16_BIT,
                    8 /* bufferCount */,
                    NULL,
                    NULL,
                    (audio_output_flags_t)pcmFlags);
        if (err != OK) {
            ALOGW("openAudioSink: non offloaded open failed status: %d", err);
            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
            return err;
        }
        mCurrentPcmInfo = info;
        mAudioSink->start();
    }
    if (audioSinkChanged) {
        // Resync frame bookkeeping with the freshly opened sink.
        onAudioSinkChanged();
    }
    if (offloadingAudio()) {
        // Re-arm the one-shot tear-down guard for the new offload session.
        mAudioOffloadTornDown = false;
    }
    return OK;
}

// Closes the sink and invalidates both cached configurations so the next
// onOpenAudioSink() cannot take the "no change" early-out.
void NuPlayer::Renderer::onCloseAudioSink() {
    mAudioSink->close();
    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
    mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
}

}  // namespace android