NuPlayerRenderer.cpp revision 7137ec7e005a5a6e3c0edb91cfacf16a31f4bf6a
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <media/stagefright/foundation/ABuffer.h> 24#include <media/stagefright/foundation/ADebug.h> 25#include <media/stagefright/foundation/AMessage.h> 26#include <media/stagefright/foundation/AUtils.h> 27#include <media/stagefright/MediaErrors.h> 28#include <media/stagefright/MetaData.h> 29#include <media/stagefright/Utils.h> 30 31#include <VideoFrameScheduler.h> 32 33#include <inttypes.h> 34 35namespace android { 36 37// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink 38// is closed to allow the audio DSP to power down. 39static const int64_t kOffloadPauseMaxUs = 10000000ll; 40 41// static 42const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll; 43 44NuPlayer::Renderer::Renderer( 45 const sp<MediaPlayerBase::AudioSink> &sink, 46 const sp<AMessage> ¬ify, 47 uint32_t flags) 48 : mAudioSink(sink), 49 mNotify(notify), 50 mFlags(flags), 51 mNumFramesWritten(0), 52 mDrainAudioQueuePending(false), 53 mDrainVideoQueuePending(false), 54 mAudioQueueGeneration(0), 55 mVideoQueueGeneration(0), 56 mAudioFirstAnchorTimeMediaUs(-1), 57 mAnchorTimeMediaUs(-1), 58 mAnchorTimeRealUs(-1), 59 mAnchorNumFramesWritten(-1), 60 mAnchorMaxMediaUs(-1), 61 mVideoLateByUs(0ll), 62 mHasAudio(false), 63 mHasVideo(false), 64 mPauseStartedTimeRealUs(-1), 65 mFlushingAudio(false), 66 mFlushingVideo(false), 67 mNotifyCompleteAudio(false), 68 mNotifyCompleteVideo(false), 69 mSyncQueues(false), 70 mPaused(false), 71 mVideoSampleReceived(false), 72 mVideoRenderingStarted(false), 73 mVideoRenderingStartGeneration(0), 74 mAudioRenderingStartGeneration(0), 75 mAudioOffloadPauseTimeoutGeneration(0), 76 mAudioOffloadTornDown(false), 77 mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER), 78 mTotalBuffersQueued(0), 79 mLastAudioBufferDrained(0) { 80} 81 82NuPlayer::Renderer::~Renderer() { 83 if (offloadingAudio()) { 84 mAudioSink->stop(); 85 mAudioSink->flush(); 86 mAudioSink->close(); 87 } 88} 89 90void NuPlayer::Renderer::queueBuffer( 91 bool audio, 92 const sp<ABuffer> &buffer, 93 const sp<AMessage> ¬ifyConsumed) { 94 sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); 95 msg->setInt32("audio", static_cast<int32_t>(audio)); 96 msg->setBuffer("buffer", buffer); 97 msg->setMessage("notifyConsumed", notifyConsumed); 98 msg->post(); 99} 100 101void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { 102 CHECK_NE(finalResult, (status_t)OK); 103 104 sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); 105 msg->setInt32("audio", static_cast<int32_t>(audio)); 106 msg->setInt32("finalResult", finalResult); 107 msg->post(); 108} 109 110void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) { 111 { 112 Mutex::Autolock autoLock(mFlushLock); 113 if (audio) { 114 mNotifyCompleteAudio |= notifyComplete; 115 if (mFlushingAudio) { 116 return; 117 } 118 mFlushingAudio = true; 119 } else { 120 mNotifyCompleteVideo |= notifyComplete; 121 if (mFlushingVideo) { 122 return; 123 } 124 mFlushingVideo = true; 125 } 126 } 127 128 sp<AMessage> msg = new AMessage(kWhatFlush, id()); 129 msg->setInt32("audio", static_cast<int32_t>(audio)); 130 msg->post(); 131} 132 133void NuPlayer::Renderer::signalTimeDiscontinuity() { 134 Mutex::Autolock autoLock(mLock); 135 // CHECK(mAudioQueue.empty()); 136 // CHECK(mVideoQueue.empty()); 137 setAudioFirstAnchorTime(-1); 138 setAnchorTime(-1, -1); 139 setVideoLateByUs(0); 140 mSyncQueues = false; 141} 142 143void NuPlayer::Renderer::signalAudioSinkChanged() { 144 (new AMessage(kWhatAudioSinkChanged, id()))->post(); 145} 146 147void NuPlayer::Renderer::signalDisableOffloadAudio() { 148 (new AMessage(kWhatDisableOffloadAudio, id()))->post(); 149} 150 151void NuPlayer::Renderer::signalEnableOffloadAudio() { 152 (new AMessage(kWhatEnableOffloadAudio, id()))->post(); 153} 154 155void NuPlayer::Renderer::pause() { 156 (new AMessage(kWhatPause, id()))->post(); 157} 158 159void NuPlayer::Renderer::resume() { 160 (new AMessage(kWhatResume, id()))->post(); 161} 162 163void NuPlayer::Renderer::setVideoFrameRate(float fps) { 164 sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id()); 165 msg->setFloat("frame-rate", fps); 166 msg->post(); 167} 168 169status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) { 170 return getCurrentPosition(mediaUs, ALooper::GetNowUs()); 171} 172 173status_t NuPlayer::Renderer::getCurrentPosition( 174 int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) { 175 Mutex::Autolock autoLock(mTimeLock); 176 if (!mHasAudio && !mHasVideo) { 177 return NO_INIT; 178 } 179 180 if (mAnchorTimeMediaUs < 0) { 181 return NO_INIT; 182 } 183 184 int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; 185 186 if (mPauseStartedTimeRealUs != -1) { 187 positionUs -= (nowUs - mPauseStartedTimeRealUs); 188 } 189 190 // limit position to the last queued media time (for video only stream 191 // position will be discrete as we don't know how long each frame lasts) 192 if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) { 193 if (positionUs > mAnchorMaxMediaUs) { 194 positionUs = mAnchorMaxMediaUs; 195 } 196 } 197 198 if (positionUs < mAudioFirstAnchorTimeMediaUs) { 199 positionUs = mAudioFirstAnchorTimeMediaUs; 200 } 201 202 *mediaUs = (positionUs <= 0) ? 0 : positionUs; 203 return OK; 204} 205 206void NuPlayer::Renderer::setHasMedia(bool audio) { 207 Mutex::Autolock autoLock(mTimeLock); 208 if (audio) { 209 mHasAudio = true; 210 } else { 211 mHasVideo = true; 212 } 213} 214 215void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) { 216 Mutex::Autolock autoLock(mTimeLock); 217 mAudioFirstAnchorTimeMediaUs = mediaUs; 218} 219 220void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) { 221 Mutex::Autolock autoLock(mTimeLock); 222 if (mAudioFirstAnchorTimeMediaUs == -1) { 223 mAudioFirstAnchorTimeMediaUs = mediaUs; 224 } 225} 226 227void NuPlayer::Renderer::setAnchorTime( 228 int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) { 229 Mutex::Autolock autoLock(mTimeLock); 230 mAnchorTimeMediaUs = mediaUs; 231 mAnchorTimeRealUs = realUs; 232 mAnchorNumFramesWritten = numFramesWritten; 233 if (resume) { 234 mPauseStartedTimeRealUs = -1; 235 } 236} 237 238void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) { 239 Mutex::Autolock autoLock(mTimeLock); 240 mVideoLateByUs = lateUs; 241} 242 243int64_t NuPlayer::Renderer::getVideoLateByUs() { 244 Mutex::Autolock autoLock(mTimeLock); 245 return mVideoLateByUs; 246} 247 248void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) { 249 Mutex::Autolock autoLock(mTimeLock); 250 mPauseStartedTimeRealUs = realUs; 251} 252 253bool NuPlayer::Renderer::openAudioSink( 254 const sp<AMessage> &format, 255 bool offloadOnly, 256 bool hasVideo, 257 uint32_t flags) { 258 sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id()); 259 msg->setMessage("format", format); 260 msg->setInt32("offload-only", offloadOnly); 261 msg->setInt32("has-video", hasVideo); 262 msg->setInt32("flags", flags); 263 264 sp<AMessage> response; 265 msg->postAndAwaitResponse(&response); 266 267 int32_t offload; 268 CHECK(response->findInt32("offload", &offload)); 269 return (offload != 0); 270} 271 272void NuPlayer::Renderer::closeAudioSink() { 273 sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id()); 274 275 sp<AMessage> response; 276 msg->postAndAwaitResponse(&response); 277} 278 279void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { 280 switch (msg->what()) { 281 case kWhatOpenAudioSink: 282 { 283 sp<AMessage> format; 284 CHECK(msg->findMessage("format", &format)); 285 286 int32_t offloadOnly; 287 CHECK(msg->findInt32("offload-only", &offloadOnly)); 288 289 int32_t hasVideo; 290 CHECK(msg->findInt32("has-video", &hasVideo)); 291 292 uint32_t flags; 293 CHECK(msg->findInt32("flags", (int32_t *)&flags)); 294 295 bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags); 296 297 sp<AMessage> response = new AMessage; 298 response->setInt32("offload", offload); 299 300 uint32_t replyID; 301 CHECK(msg->senderAwaitsResponse(&replyID)); 302 response->postReply(replyID); 303 304 break; 305 } 306 307 case kWhatCloseAudioSink: 308 { 309 uint32_t replyID; 310 CHECK(msg->senderAwaitsResponse(&replyID)); 311 312 onCloseAudioSink(); 313 314 sp<AMessage> response = new AMessage; 315 response->postReply(replyID); 316 break; 317 } 318 319 case kWhatStopAudioSink: 320 { 321 mAudioSink->stop(); 322 break; 323 } 324 325 case kWhatDrainAudioQueue: 326 { 327 int32_t generation; 328 CHECK(msg->findInt32("generation", &generation)); 329 if (generation != mAudioQueueGeneration) { 330 break; 331 } 332 333 mDrainAudioQueuePending = false; 334 335 if (onDrainAudioQueue()) { 336 uint32_t numFramesPlayed; 337 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), 338 (status_t)OK); 339 340 uint32_t numFramesPendingPlayout = 341 mNumFramesWritten - numFramesPlayed; 342 343 // This is how long the audio sink will have data to 344 // play back. 345 int64_t delayUs = 346 mAudioSink->msecsPerFrame() 347 * numFramesPendingPlayout * 1000ll; 348 349 // Let's give it more data after about half that time 350 // has elapsed. 351 // kWhatDrainAudioQueue is used for non-offloading mode, 352 // and mLock is used only for offloading mode. Therefore, 353 // no need to acquire mLock here. 354 postDrainAudioQueue_l(delayUs / 2); 355 } 356 break; 357 } 358 359 case kWhatDrainVideoQueue: 360 { 361 int32_t generation; 362 CHECK(msg->findInt32("generation", &generation)); 363 if (generation != mVideoQueueGeneration) { 364 break; 365 } 366 367 mDrainVideoQueuePending = false; 368 369 onDrainVideoQueue(); 370 371 postDrainVideoQueue(); 372 break; 373 } 374 375 case kWhatPostDrainVideoQueue: 376 { 377 int32_t generation; 378 CHECK(msg->findInt32("generation", &generation)); 379 if (generation != mVideoQueueGeneration) { 380 break; 381 } 382 383 mDrainVideoQueuePending = false; 384 postDrainVideoQueue(); 385 break; 386 } 387 388 case kWhatQueueBuffer: 389 { 390 onQueueBuffer(msg); 391 break; 392 } 393 394 case kWhatQueueEOS: 395 { 396 onQueueEOS(msg); 397 break; 398 } 399 400 case kWhatFlush: 401 { 402 onFlush(msg); 403 break; 404 } 405 406 case kWhatAudioSinkChanged: 407 { 408 onAudioSinkChanged(); 409 break; 410 } 411 412 case kWhatDisableOffloadAudio: 413 { 414 onDisableOffloadAudio(); 415 break; 416 } 417 418 case kWhatEnableOffloadAudio: 419 { 420 onEnableOffloadAudio(); 421 break; 422 } 423 424 case kWhatPause: 425 { 426 onPause(); 427 break; 428 } 429 430 case kWhatResume: 431 { 432 onResume(); 433 break; 434 } 435 436 case kWhatSetVideoFrameRate: 437 { 438 float fps; 439 CHECK(msg->findFloat("frame-rate", &fps)); 440 onSetVideoFrameRate(fps); 441 break; 442 } 443 444 case kWhatAudioOffloadTearDown: 445 { 446 onAudioOffloadTearDown(kDueToError); 447 break; 448 } 449 450 case kWhatAudioOffloadPauseTimeout: 451 { 452 int32_t generation; 453 CHECK(msg->findInt32("generation", &generation)); 454 if (generation != mAudioOffloadPauseTimeoutGeneration) { 455 break; 456 } 457 ALOGV("Audio Offload tear down due to pause timeout."); 458 onAudioOffloadTearDown(kDueToTimeout); 459 break; 460 } 461 462 default: 463 TRESPASS(); 464 break; 465 } 466} 467 468void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) { 469 if (mDrainAudioQueuePending || mSyncQueues || mPaused 470 || offloadingAudio()) { 471 return; 472 } 473 474 if (mAudioQueue.empty()) { 475 return; 476 } 477 478 mDrainAudioQueuePending = true; 479 sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); 480 msg->setInt32("generation", mAudioQueueGeneration); 481 msg->post(delayUs); 482} 483 484void NuPlayer::Renderer::prepareForMediaRenderingStart() { 485 mAudioRenderingStartGeneration = mAudioQueueGeneration; 486 mVideoRenderingStartGeneration = mVideoQueueGeneration; 487} 488 489void NuPlayer::Renderer::notifyIfMediaRenderingStarted() { 490 if (mVideoRenderingStartGeneration == mVideoQueueGeneration && 491 mAudioRenderingStartGeneration == mAudioQueueGeneration) { 492 mVideoRenderingStartGeneration = -1; 493 mAudioRenderingStartGeneration = -1; 494 495 sp<AMessage> notify = mNotify->dup(); 496 notify->setInt32("what", kWhatMediaRenderingStart); 497 notify->post(); 498 } 499} 500 501// static 502size_t NuPlayer::Renderer::AudioSinkCallback( 503 MediaPlayerBase::AudioSink * /* audioSink */, 504 void *buffer, 505 size_t size, 506 void *cookie, 507 MediaPlayerBase::AudioSink::cb_event_t event) { 508 NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie; 509 510 switch (event) { 511 case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER: 512 { 513 return me->fillAudioBuffer(buffer, size); 514 break; 515 } 516 517 case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END: 518 { 519 me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM); 520 break; 521 } 522 523 case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN: 524 { 525 me->notifyAudioOffloadTearDown(); 526 break; 527 } 528 } 529 530 return 0; 531} 532 533size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) { 534 Mutex::Autolock autoLock(mLock); 535 536 if (!offloadingAudio() || mPaused) { 537 return 0; 538 } 539 540 bool hasEOS = false; 541 542 size_t sizeCopied = 0; 543 bool firstEntry = true; 544 while (sizeCopied < size && !mAudioQueue.empty()) { 545 QueueEntry *entry = &*mAudioQueue.begin(); 546 547 if (entry->mBuffer == NULL) { // EOS 548 hasEOS = true; 549 mAudioQueue.erase(mAudioQueue.begin()); 550 entry = NULL; 551 break; 552 } 553 554 if (firstEntry && entry->mOffset == 0) { 555 firstEntry = false; 556 int64_t mediaTimeUs; 557 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 558 ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 559 setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); 560 } 561 562 size_t copy = entry->mBuffer->size() - entry->mOffset; 563 size_t sizeRemaining = size - sizeCopied; 564 if (copy > sizeRemaining) { 565 copy = sizeRemaining; 566 } 567 568 memcpy((char *)buffer + sizeCopied, 569 entry->mBuffer->data() + entry->mOffset, 570 copy); 571 572 entry->mOffset += copy; 573 if (entry->mOffset == entry->mBuffer->size()) { 574 entry->mNotifyConsumed->post(); 575 mAudioQueue.erase(mAudioQueue.begin()); 576 entry = NULL; 577 } 578 sizeCopied += copy; 579 notifyIfMediaRenderingStarted(); 580 } 581 582 if (mAudioFirstAnchorTimeMediaUs >= 0) { 583 int64_t nowUs = ALooper::GetNowUs(); 584 setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs)); 585 } 586 587 // we don't know how much data we are queueing for offloaded tracks 588 mAnchorMaxMediaUs = -1; 589 590 if (hasEOS) { 591 (new AMessage(kWhatStopAudioSink, id()))->post(); 592 } 593 594 return sizeCopied; 595} 596 597bool NuPlayer::Renderer::onDrainAudioQueue() { 598 uint32_t numFramesPlayed; 599 if (mAudioSink->getPosition(&numFramesPlayed) != OK) { 600 return false; 601 } 602 603 ssize_t numFramesAvailableToWrite = 604 mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); 605 606#if 0 607 if (numFramesAvailableToWrite == mAudioSink->frameCount()) { 608 ALOGI("audio sink underrun"); 609 } else { 610 ALOGV("audio queue has %d frames left to play", 611 mAudioSink->frameCount() - numFramesAvailableToWrite); 612 } 613#endif 614 615 size_t numBytesAvailableToWrite = 616 numFramesAvailableToWrite * mAudioSink->frameSize(); 617 618 while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) { 619 QueueEntry *entry = &*mAudioQueue.begin(); 620 621 mLastAudioBufferDrained = entry->mBufferOrdinal; 622 623 if (entry->mBuffer == NULL) { 624 // EOS 625 int64_t postEOSDelayUs = 0; 626 if (mAudioSink->needsTrailingPadding()) { 627 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs()); 628 } 629 notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs); 630 631 mAudioQueue.erase(mAudioQueue.begin()); 632 entry = NULL; 633 // Need to stop the track here, because that will play out the last 634 // little bit at the end of the file. Otherwise short files won't play. 635 mAudioSink->stop(); 636 mNumFramesWritten = 0; 637 return false; 638 } 639 640 if (entry->mOffset == 0) { 641 int64_t mediaTimeUs; 642 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 643 ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 644 onNewAudioMediaTime(mediaTimeUs); 645 } 646 647 size_t copy = entry->mBuffer->size() - entry->mOffset; 648 if (copy > numBytesAvailableToWrite) { 649 copy = numBytesAvailableToWrite; 650 } 651 652 ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy); 653 if (written < 0) { 654 // An error in AudioSink write is fatal here. 655 LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy); 656 } 657 658 entry->mOffset += written; 659 if (entry->mOffset == entry->mBuffer->size()) { 660 entry->mNotifyConsumed->post(); 661 mAudioQueue.erase(mAudioQueue.begin()); 662 663 entry = NULL; 664 } 665 666 numBytesAvailableToWrite -= written; 667 size_t copiedFrames = written / mAudioSink->frameSize(); 668 mNumFramesWritten += copiedFrames; 669 670 notifyIfMediaRenderingStarted(); 671 672 if (written != (ssize_t)copy) { 673 // A short count was received from AudioSink::write() 674 // 675 // AudioSink write should block until exactly the number of bytes are delivered. 676 // But it may return with a short count (without an error) when: 677 // 678 // 1) Size to be copied is not a multiple of the frame size. We consider this fatal. 679 // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded. 680 681 // (Case 1) 682 // Must be a multiple of the frame size. If it is not a multiple of a frame size, it 683 // needs to fail, as we should not carry over fractional frames between calls. 684 CHECK_EQ(copy % mAudioSink->frameSize(), 0); 685 686 // (Case 2) 687 // Return early to the caller. 688 // Beware of calling immediately again as this may busy-loop if you are not careful. 689 ALOGW("AudioSink write short frame count %zd < %zu", written, copy); 690 break; 691 } 692 } 693 mAnchorMaxMediaUs = 694 mAnchorTimeMediaUs + 695 (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL) 696 * 1000LL * mAudioSink->msecsPerFrame()); 697 698 return !mAudioQueue.empty(); 699} 700 701int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) { 702 int64_t writtenAudioDurationUs = 703 mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame(); 704 return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs); 705} 706 707int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) { 708 int64_t currentPositionUs; 709 if (getCurrentPosition(¤tPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) { 710 // If failed to get current position, e.g. due to audio clock is not ready, then just 711 // play out video immediately without delay. 712 return nowUs; 713 } 714 return (mediaTimeUs - currentPositionUs) + nowUs; 715} 716 717void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) { 718 // TRICKY: vorbis decoder generates multiple frames with the same 719 // timestamp, so only update on the first frame with a given timestamp 720 if (mediaTimeUs == mAnchorTimeMediaUs) { 721 return; 722 } 723 setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); 724 int64_t nowUs = ALooper::GetNowUs(); 725 setAnchorTime( 726 mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten); 727} 728 729void NuPlayer::Renderer::postDrainVideoQueue() { 730 if (mDrainVideoQueuePending 731 || mSyncQueues 732 || (mPaused && mVideoSampleReceived)) { 733 return; 734 } 735 736 if (mVideoQueue.empty()) { 737 return; 738 } 739 740 QueueEntry &entry = *mVideoQueue.begin(); 741 742 sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); 743 msg->setInt32("generation", mVideoQueueGeneration); 744 745 if (entry.mBuffer == NULL) { 746 // EOS doesn't carry a timestamp. 747 msg->post(); 748 mDrainVideoQueuePending = true; 749 return; 750 } 751 752 int64_t delayUs; 753 int64_t nowUs = ALooper::GetNowUs(); 754 int64_t realTimeUs; 755 if (mFlags & FLAG_REAL_TIME) { 756 int64_t mediaTimeUs; 757 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 758 realTimeUs = mediaTimeUs; 759 } else { 760 int64_t mediaTimeUs; 761 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 762 763 if (mAnchorTimeMediaUs < 0) { 764 setAnchorTime(mediaTimeUs, nowUs); 765 realTimeUs = nowUs; 766 } else { 767 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); 768 } 769 if (!mHasAudio) { 770 mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps 771 } 772 773 // Heuristics to handle situation when media time changed without a 774 // discontinuity. If we have not drained an audio buffer that was 775 // received after this buffer, repost in 10 msec. Otherwise repost 776 // in 500 msec. 777 delayUs = realTimeUs - nowUs; 778 if (delayUs > 500000) { 779 int64_t postDelayUs = 500000; 780 if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) { 781 postDelayUs = 10000; 782 } 783 msg->setWhat(kWhatPostDrainVideoQueue); 784 msg->post(postDelayUs); 785 mVideoScheduler->restart(); 786 ALOGI("possible video time jump of %dms, retrying in %dms", 787 (int)(delayUs / 1000), (int)(postDelayUs / 1000)); 788 mDrainVideoQueuePending = true; 789 return; 790 } 791 } 792 793 realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000; 794 int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000); 795 796 delayUs = realTimeUs - nowUs; 797 798 ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs); 799 // post 2 display refreshes before rendering is due 800 msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0); 801 802 mDrainVideoQueuePending = true; 803} 804 805void NuPlayer::Renderer::onDrainVideoQueue() { 806 if (mVideoQueue.empty()) { 807 return; 808 } 809 810 QueueEntry *entry = &*mVideoQueue.begin(); 811 812 if (entry->mBuffer == NULL) { 813 // EOS 814 815 notifyEOS(false /* audio */, entry->mFinalResult); 816 817 mVideoQueue.erase(mVideoQueue.begin()); 818 entry = NULL; 819 820 setVideoLateByUs(0); 821 return; 822 } 823 824 int64_t nowUs = -1; 825 int64_t realTimeUs; 826 if (mFlags & FLAG_REAL_TIME) { 827 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); 828 } else { 829 int64_t mediaTimeUs; 830 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 831 832 nowUs = ALooper::GetNowUs(); 833 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); 834 } 835 836 bool tooLate = false; 837 838 if (!mPaused) { 839 if (nowUs == -1) { 840 nowUs = ALooper::GetNowUs(); 841 } 842 setVideoLateByUs(nowUs - realTimeUs); 843 tooLate = (mVideoLateByUs > 40000); 844 845 if (tooLate) { 846 ALOGV("video late by %lld us (%.2f secs)", 847 mVideoLateByUs, mVideoLateByUs / 1E6); 848 } else { 849 ALOGV("rendering video at media time %.2f secs", 850 (mFlags & FLAG_REAL_TIME ? realTimeUs : 851 (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); 852 } 853 } else { 854 setVideoLateByUs(0); 855 if (!mVideoSampleReceived && !mHasAudio) { 856 // This will ensure that the first frame after a flush won't be used as anchor 857 // when renderer is in paused state, because resume can happen any time after seek. 858 setAnchorTime(-1, -1); 859 } 860 } 861 862 entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll); 863 entry->mNotifyConsumed->setInt32("render", !tooLate); 864 entry->mNotifyConsumed->post(); 865 mVideoQueue.erase(mVideoQueue.begin()); 866 entry = NULL; 867 868 mVideoSampleReceived = true; 869 870 if (!mPaused) { 871 if (!mVideoRenderingStarted) { 872 mVideoRenderingStarted = true; 873 notifyVideoRenderingStart(); 874 } 875 notifyIfMediaRenderingStarted(); 876 } 877} 878 879void NuPlayer::Renderer::notifyVideoRenderingStart() { 880 sp<AMessage> notify = mNotify->dup(); 881 notify->setInt32("what", kWhatVideoRenderingStart); 882 notify->post(); 883} 884 885void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) { 886 sp<AMessage> notify = mNotify->dup(); 887 notify->setInt32("what", kWhatEOS); 888 notify->setInt32("audio", static_cast<int32_t>(audio)); 889 notify->setInt32("finalResult", finalResult); 890 notify->post(delayUs); 891} 892 893void NuPlayer::Renderer::notifyAudioOffloadTearDown() { 894 (new AMessage(kWhatAudioOffloadTearDown, id()))->post(); 895} 896 897void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { 898 int32_t audio; 899 CHECK(msg->findInt32("audio", &audio)); 900 901 setHasMedia(audio); 902 903 if (mHasVideo) { 904 if (mVideoScheduler == NULL) { 905 mVideoScheduler = new VideoFrameScheduler(); 906 mVideoScheduler->init(); 907 } 908 } 909 910 if (dropBufferWhileFlushing(audio, msg)) { 911 return; 912 } 913 914 sp<ABuffer> buffer; 915 CHECK(msg->findBuffer("buffer", &buffer)); 916 917 sp<AMessage> notifyConsumed; 918 CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed)); 919 920 QueueEntry entry; 921 entry.mBuffer = buffer; 922 entry.mNotifyConsumed = notifyConsumed; 923 entry.mOffset = 0; 924 entry.mFinalResult = OK; 925 entry.mBufferOrdinal = ++mTotalBuffersQueued; 926 927 if (audio) { 928 Mutex::Autolock autoLock(mLock); 929 mAudioQueue.push_back(entry); 930 postDrainAudioQueue_l(); 931 } else { 932 mVideoQueue.push_back(entry); 933 postDrainVideoQueue(); 934 } 935 936 Mutex::Autolock autoLock(mLock); 937 if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) { 938 return; 939 } 940 941 sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer; 942 sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer; 943 944 if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) { 945 // EOS signalled on either queue. 946 syncQueuesDone_l(); 947 return; 948 } 949 950 int64_t firstAudioTimeUs; 951 int64_t firstVideoTimeUs; 952 CHECK(firstAudioBuffer->meta() 953 ->findInt64("timeUs", &firstAudioTimeUs)); 954 CHECK(firstVideoBuffer->meta() 955 ->findInt64("timeUs", &firstVideoTimeUs)); 956 957 int64_t diff = firstVideoTimeUs - firstAudioTimeUs; 958 959 ALOGV("queueDiff = %.2f secs", diff / 1E6); 960 961 if (diff > 100000ll) { 962 // Audio data starts More than 0.1 secs before video. 963 // Drop some audio. 964 965 (*mAudioQueue.begin()).mNotifyConsumed->post(); 966 mAudioQueue.erase(mAudioQueue.begin()); 967 return; 968 } 969 970 syncQueuesDone_l(); 971} 972 973void NuPlayer::Renderer::syncQueuesDone_l() { 974 if (!mSyncQueues) { 975 return; 976 } 977 978 mSyncQueues = false; 979 980 if (!mAudioQueue.empty()) { 981 postDrainAudioQueue_l(); 982 } 983 984 if (!mVideoQueue.empty()) { 985 postDrainVideoQueue(); 986 } 987} 988 989void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { 990 int32_t audio; 991 CHECK(msg->findInt32("audio", &audio)); 992 993 if (dropBufferWhileFlushing(audio, msg)) { 994 return; 995 } 996 997 int32_t finalResult; 998 CHECK(msg->findInt32("finalResult", &finalResult)); 999 1000 QueueEntry entry; 1001 entry.mOffset = 0; 1002 entry.mFinalResult = finalResult; 1003 1004 if (audio) { 1005 Mutex::Autolock autoLock(mLock); 1006 if (mAudioQueue.empty() && mSyncQueues) { 1007 syncQueuesDone_l(); 1008 } 1009 mAudioQueue.push_back(entry); 1010 postDrainAudioQueue_l(); 1011 } else { 1012 if (mVideoQueue.empty() && mSyncQueues) { 1013 Mutex::Autolock autoLock(mLock); 1014 syncQueuesDone_l(); 1015 } 1016 mVideoQueue.push_back(entry); 1017 postDrainVideoQueue(); 1018 } 1019} 1020 1021void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { 1022 int32_t audio, notifyComplete; 1023 CHECK(msg->findInt32("audio", &audio)); 1024 1025 { 1026 Mutex::Autolock autoLock(mFlushLock); 1027 if (audio) { 1028 mFlushingAudio = false; 1029 notifyComplete = mNotifyCompleteAudio; 1030 mNotifyCompleteAudio = false; 1031 } else { 1032 mFlushingVideo = false; 1033 notifyComplete = mNotifyCompleteVideo; 1034 mNotifyCompleteVideo = false; 1035 } 1036 } 1037 1038 // If we're currently syncing the queues, i.e. dropping audio while 1039 // aligning the first audio/video buffer times and only one of the 1040 // two queues has data, we may starve that queue by not requesting 1041 // more buffers from the decoder. If the other source then encounters 1042 // a discontinuity that leads to flushing, we'll never find the 1043 // corresponding discontinuity on the other queue. 1044 // Therefore we'll stop syncing the queues if at least one of them 1045 // is flushed. 1046 { 1047 Mutex::Autolock autoLock(mLock); 1048 syncQueuesDone_l(); 1049 setPauseStartedTimeRealUs(-1); 1050 } 1051 1052 ALOGV("flushing %s", audio ? "audio" : "video"); 1053 if (audio) { 1054 { 1055 Mutex::Autolock autoLock(mLock); 1056 flushQueue(&mAudioQueue); 1057 1058 ++mAudioQueueGeneration; 1059 prepareForMediaRenderingStart(); 1060 1061 if (offloadingAudio()) { 1062 setAudioFirstAnchorTime(-1); 1063 } 1064 } 1065 1066 mDrainAudioQueuePending = false; 1067 1068 if (offloadingAudio()) { 1069 mAudioSink->pause(); 1070 mAudioSink->flush(); 1071 mAudioSink->start(); 1072 } 1073 } else { 1074 flushQueue(&mVideoQueue); 1075 1076 mDrainVideoQueuePending = false; 1077 ++mVideoQueueGeneration; 1078 1079 if (mVideoScheduler != NULL) { 1080 mVideoScheduler->restart(); 1081 } 1082 1083 prepareForMediaRenderingStart(); 1084 } 1085 1086 mVideoSampleReceived = false; 1087 1088 if (notifyComplete) { 1089 notifyFlushComplete(audio); 1090 } 1091} 1092 1093void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) { 1094 while (!queue->empty()) { 1095 QueueEntry *entry = &*queue->begin(); 1096 1097 if (entry->mBuffer != NULL) { 1098 entry->mNotifyConsumed->post(); 1099 } 1100 1101 queue->erase(queue->begin()); 1102 entry = NULL; 1103 } 1104} 1105 1106void NuPlayer::Renderer::notifyFlushComplete(bool audio) { 1107 sp<AMessage> notify = mNotify->dup(); 1108 notify->setInt32("what", kWhatFlushComplete); 1109 notify->setInt32("audio", static_cast<int32_t>(audio)); 1110 notify->post(); 1111} 1112 1113bool NuPlayer::Renderer::dropBufferWhileFlushing( 1114 bool audio, const sp<AMessage> &msg) { 1115 bool flushing = false; 1116 1117 { 1118 Mutex::Autolock autoLock(mFlushLock); 1119 if (audio) { 1120 flushing = mFlushingAudio; 1121 } else { 1122 flushing = mFlushingVideo; 1123 } 1124 } 1125 1126 if (!flushing) { 1127 return false; 1128 } 1129 1130 sp<AMessage> notifyConsumed; 1131 if (msg->findMessage("notifyConsumed", ¬ifyConsumed)) { 1132 notifyConsumed->post(); 1133 } 1134 1135 return true; 1136} 1137 1138void NuPlayer::Renderer::onAudioSinkChanged() { 1139 if (offloadingAudio()) { 1140 return; 1141 } 1142 CHECK(!mDrainAudioQueuePending); 1143 mNumFramesWritten = 0; 1144 mAnchorNumFramesWritten = -1; 1145 uint32_t written; 1146 if (mAudioSink->getFramesWritten(&written) == OK) { 1147 mNumFramesWritten = written; 1148 } 1149} 1150 1151void NuPlayer::Renderer::onDisableOffloadAudio() { 1152 Mutex::Autolock autoLock(mLock); 1153 mFlags &= ~FLAG_OFFLOAD_AUDIO; 1154 ++mAudioQueueGeneration; 1155} 1156 1157void NuPlayer::Renderer::onEnableOffloadAudio() { 1158 Mutex::Autolock autoLock(mLock); 1159 mFlags |= FLAG_OFFLOAD_AUDIO; 1160 ++mAudioQueueGeneration; 1161} 1162 1163void NuPlayer::Renderer::onPause() { 1164 if (mPaused) { 1165 ALOGW("Renderer::onPause() called while already paused!"); 1166 return; 1167 } 1168 { 1169 Mutex::Autolock autoLock(mLock); 1170 ++mAudioQueueGeneration; 1171 ++mVideoQueueGeneration; 1172 prepareForMediaRenderingStart(); 1173 mPaused = true; 1174 setPauseStartedTimeRealUs(ALooper::GetNowUs()); 1175 } 1176 1177 mDrainAudioQueuePending = false; 1178 mDrainVideoQueuePending = false; 1179 1180 if (mHasAudio) { 1181 mAudioSink->pause(); 1182 startAudioOffloadPauseTimeout(); 1183 } 1184 1185 ALOGV("now paused audio queue has %d entries, video has %d entries", 1186 mAudioQueue.size(), mVideoQueue.size()); 1187} 1188 1189void NuPlayer::Renderer::onResume() { 1190 if (!mPaused) { 1191 return; 1192 } 1193 1194 if (mHasAudio) { 1195 cancelAudioOffloadPauseTimeout(); 1196 mAudioSink->start(); 1197 } 1198 1199 Mutex::Autolock autoLock(mLock); 1200 mPaused = false; 1201 if (mPauseStartedTimeRealUs != -1) { 1202 int64_t newAnchorRealUs = 1203 mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs; 1204 setAnchorTime( 1205 mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */); 1206 } 1207 1208 if (!mAudioQueue.empty()) { 1209 postDrainAudioQueue_l(); 1210 } 1211 1212 if (!mVideoQueue.empty()) { 1213 postDrainVideoQueue(); 1214 } 1215} 1216 1217void NuPlayer::Renderer::onSetVideoFrameRate(float fps) { 1218 if (mVideoScheduler == NULL) { 1219 mVideoScheduler = new VideoFrameScheduler(); 1220 } 1221 mVideoScheduler->init(fps); 1222} 1223 1224// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs() 1225// as it acquires locks and may query the audio driver. 1226// 1227// Some calls could conceivably retrieve extrapolated data instead of 1228// accessing getTimestamp() or getPosition() every time a data buffer with 1229// a media time is received. 1230// 1231int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { 1232 uint32_t numFramesPlayed; 1233 int64_t numFramesPlayedAt; 1234 AudioTimestamp ts; 1235 static const int64_t kStaleTimestamp100ms = 100000; 1236 1237 status_t res = mAudioSink->getTimestamp(ts); 1238 if (res == OK) { // case 1: mixing audio tracks and offloaded tracks. 1239 numFramesPlayed = ts.mPosition; 1240 numFramesPlayedAt = 1241 ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; 1242 const int64_t timestampAge = nowUs - numFramesPlayedAt; 1243 if (timestampAge > kStaleTimestamp100ms) { 1244 // This is an audio FIXME. 1245 // getTimestamp returns a timestamp which may come from audio mixing threads. 1246 // After pausing, the MixerThread may go idle, thus the mTime estimate may 1247 // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms, 1248 // the max latency should be about 25ms with an average around 12ms (to be verified). 1249 // For safety we use 100ms. 1250 ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)", 1251 (long long)nowUs, (long long)numFramesPlayedAt); 1252 numFramesPlayedAt = nowUs - kStaleTimestamp100ms; 1253 } 1254 //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt); 1255 } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track 1256 numFramesPlayed = 0; 1257 numFramesPlayedAt = nowUs; 1258 //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", 1259 // numFramesPlayed, (long long)numFramesPlayedAt); 1260 } else { // case 3: transitory at new track or audio fast tracks. 1261 res = mAudioSink->getPosition(&numFramesPlayed); 1262 CHECK_EQ(res, (status_t)OK); 1263 numFramesPlayedAt = nowUs; 1264 numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */ 1265 //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); 1266 } 1267 1268 // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. 1269 //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test 1270 int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()) 1271 + nowUs - numFramesPlayedAt; 1272 if (durationUs < 0) { 1273 // Occurs when numFramesPlayed position is very small and the following: 1274 // (1) In case 1, the time nowUs is computed before getTimestamp() is called and 1275 // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed. 1276 // (2) In case 3, using getPosition and adding mAudioSink->latency() to 1277 // numFramesPlayedAt, by a time amount greater than numFramesPlayed. 1278 // 1279 // Both of these are transitory conditions. 1280 ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs); 1281 durationUs = 0; 1282 } 1283 ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)", 1284 (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt); 1285 return durationUs; 1286} 1287 1288void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) { 1289 if (mAudioOffloadTornDown) { 1290 return; 1291 } 1292 mAudioOffloadTornDown = true; 1293 1294 int64_t currentPositionUs; 1295 if (getCurrentPosition(¤tPositionUs) != OK) { 1296 currentPositionUs = 0; 1297 } 1298 1299 mAudioSink->stop(); 1300 mAudioSink->flush(); 1301 1302 sp<AMessage> notify = mNotify->dup(); 1303 notify->setInt32("what", kWhatAudioOffloadTearDown); 1304 notify->setInt64("positionUs", currentPositionUs); 1305 notify->setInt32("reason", reason); 1306 notify->post(); 1307} 1308 1309void NuPlayer::Renderer::startAudioOffloadPauseTimeout() { 1310 if (offloadingAudio()) { 1311 sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id()); 1312 msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration); 1313 msg->post(kOffloadPauseMaxUs); 1314 } 1315} 1316 1317void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() { 1318 if (offloadingAudio()) { 1319 ++mAudioOffloadPauseTimeoutGeneration; 1320 } 1321} 1322 1323bool NuPlayer::Renderer::onOpenAudioSink( 1324 const sp<AMessage> &format, 1325 bool offloadOnly, 1326 bool hasVideo, 1327 uint32_t flags) { 1328 ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)", 1329 offloadOnly, offloadingAudio()); 1330 bool audioSinkChanged = false; 1331 1332 int32_t numChannels; 1333 CHECK(format->findInt32("channel-count", &numChannels)); 1334 1335 int32_t channelMask; 1336 if (!format->findInt32("channel-mask", &channelMask)) { 1337 // signal to the AudioSink to derive the mask from count. 1338 channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER; 1339 } 1340 1341 int32_t sampleRate; 1342 CHECK(format->findInt32("sample-rate", &sampleRate)); 1343 1344 if (offloadingAudio()) { 1345 audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT; 1346 AString mime; 1347 CHECK(format->findString("mime", &mime)); 1348 status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str()); 1349 1350 if (err != OK) { 1351 ALOGE("Couldn't map mime \"%s\" to a valid " 1352 "audio_format", mime.c_str()); 1353 onDisableOffloadAudio(); 1354 } else { 1355 ALOGV("Mime \"%s\" mapped to audio_format 0x%x", 1356 mime.c_str(), audioFormat); 1357 1358 int avgBitRate = -1; 1359 format->findInt32("bit-rate", &avgBitRate); 1360 1361 int32_t aacProfile = -1; 1362 if (audioFormat == AUDIO_FORMAT_AAC 1363 && format->findInt32("aac-profile", &aacProfile)) { 1364 // Redefine AAC format as per aac profile 1365 mapAACProfileToAudioFormat( 1366 audioFormat, 1367 aacProfile); 1368 } 1369 1370 audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER; 1371 offloadInfo.duration_us = -1; 1372 format->findInt64( 1373 "durationUs", &offloadInfo.duration_us); 1374 offloadInfo.sample_rate = sampleRate; 1375 offloadInfo.channel_mask = channelMask; 1376 offloadInfo.format = audioFormat; 1377 offloadInfo.stream_type = AUDIO_STREAM_MUSIC; 1378 offloadInfo.bit_rate = avgBitRate; 1379 offloadInfo.has_video = hasVideo; 1380 offloadInfo.is_streaming = true; 1381 1382 if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) { 1383 ALOGV("openAudioSink: no change in offload mode"); 1384 // no change from previous configuration, everything ok. 1385 return offloadingAudio(); 1386 } 1387 ALOGV("openAudioSink: try to open AudioSink in offload mode"); 1388 uint32_t offloadFlags = flags; 1389 offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; 1390 offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER; 1391 audioSinkChanged = true; 1392 mAudioSink->close(); 1393 err = mAudioSink->open( 1394 sampleRate, 1395 numChannels, 1396 (audio_channel_mask_t)channelMask, 1397 audioFormat, 1398 8 /* bufferCount */, 1399 &NuPlayer::Renderer::AudioSinkCallback, 1400 this, 1401 (audio_output_flags_t)offloadFlags, 1402 &offloadInfo); 1403 1404 if (err == OK) { 1405 // If the playback is offloaded to h/w, we pass 1406 // the HAL some metadata information. 1407 // We don't want to do this for PCM because it 1408 // will be going through the AudioFlinger mixer 1409 // before reaching the hardware. 1410 // TODO 1411 mCurrentOffloadInfo = offloadInfo; 1412 err = mAudioSink->start(); 1413 ALOGV_IF(err == OK, "openAudioSink: offload succeeded"); 1414 } 1415 if (err != OK) { 1416 // Clean up, fall back to non offload mode. 1417 mAudioSink->close(); 1418 onDisableOffloadAudio(); 1419 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER; 1420 ALOGV("openAudioSink: offload failed"); 1421 } 1422 } 1423 } 1424 if (!offloadOnly && !offloadingAudio()) { 1425 ALOGV("openAudioSink: open AudioSink in NON-offload mode"); 1426 uint32_t pcmFlags = flags; 1427 pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; 1428 audioSinkChanged = true; 1429 mAudioSink->close(); 1430 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER; 1431 CHECK_EQ(mAudioSink->open( 1432 sampleRate, 1433 numChannels, 1434 (audio_channel_mask_t)channelMask, 1435 AUDIO_FORMAT_PCM_16_BIT, 1436 8 /* bufferCount */, 1437 NULL, 1438 NULL, 1439 (audio_output_flags_t)pcmFlags), 1440 (status_t)OK); 1441 mAudioSink->start(); 1442 } 1443 if (audioSinkChanged) { 1444 onAudioSinkChanged(); 1445 } 1446 if (offloadingAudio()) { 1447 mAudioOffloadTornDown = false; 1448 } 1449 1450 return offloadingAudio(); 1451} 1452 1453void NuPlayer::Renderer::onCloseAudioSink() { 1454 mAudioSink->close(); 1455 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER; 1456} 1457 1458} // namespace android 1459 1460