NuPlayerRenderer.cpp revision 474d7c778b63aa33dcf25a92e23a52c1c47f0ac1
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <media/stagefright/foundation/ABuffer.h> 24#include <media/stagefright/foundation/ADebug.h> 25#include <media/stagefright/foundation/AMessage.h> 26#include <media/stagefright/foundation/AUtils.h> 27#include <media/stagefright/MediaErrors.h> 28#include <media/stagefright/MetaData.h> 29#include <media/stagefright/Utils.h> 30 31#include <VideoFrameScheduler.h> 32 33#include <inttypes.h> 34 35namespace android { 36 37// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink 38// is closed to allow the audio DSP to power down. 
39static const int64_t kOffloadPauseMaxUs = 10000000ll; 40 41// static 42const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll; 43 44NuPlayer::Renderer::Renderer( 45 const sp<MediaPlayerBase::AudioSink> &sink, 46 const sp<AMessage> ¬ify, 47 uint32_t flags) 48 : mAudioSink(sink), 49 mNotify(notify), 50 mFlags(flags), 51 mNumFramesWritten(0), 52 mDrainAudioQueuePending(false), 53 mDrainVideoQueuePending(false), 54 mAudioQueueGeneration(0), 55 mVideoQueueGeneration(0), 56 mAudioFirstAnchorTimeMediaUs(-1), 57 mAnchorTimeMediaUs(-1), 58 mAnchorTimeRealUs(-1), 59 mAnchorNumFramesWritten(-1), 60 mAnchorMaxMediaUs(-1), 61 mVideoLateByUs(0ll), 62 mHasAudio(false), 63 mHasVideo(false), 64 mPauseStartedTimeRealUs(-1), 65 mFlushingAudio(false), 66 mFlushingVideo(false), 67 mNotifyCompleteAudio(false), 68 mNotifyCompleteVideo(false), 69 mSyncQueues(false), 70 mPaused(false), 71 mVideoSampleReceived(false), 72 mVideoRenderingStarted(false), 73 mVideoRenderingStartGeneration(0), 74 mAudioRenderingStartGeneration(0), 75 mAudioOffloadPauseTimeoutGeneration(0), 76 mAudioOffloadTornDown(false), 77 mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER), 78 mTotalBuffersQueued(0), 79 mLastAudioBufferDrained(0) { 80} 81 82NuPlayer::Renderer::~Renderer() { 83 if (offloadingAudio()) { 84 mAudioSink->stop(); 85 mAudioSink->flush(); 86 mAudioSink->close(); 87 } 88} 89 90void NuPlayer::Renderer::queueBuffer( 91 bool audio, 92 const sp<ABuffer> &buffer, 93 const sp<AMessage> ¬ifyConsumed) { 94 sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); 95 msg->setInt32("audio", static_cast<int32_t>(audio)); 96 msg->setBuffer("buffer", buffer); 97 msg->setMessage("notifyConsumed", notifyConsumed); 98 msg->post(); 99} 100 101void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { 102 CHECK_NE(finalResult, (status_t)OK); 103 104 sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); 105 msg->setInt32("audio", static_cast<int32_t>(audio)); 106 msg->setInt32("finalResult", 
finalResult); 107 msg->post(); 108} 109 110void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) { 111 { 112 Mutex::Autolock autoLock(mFlushLock); 113 if (audio) { 114 mNotifyCompleteAudio |= notifyComplete; 115 if (mFlushingAudio) { 116 return; 117 } 118 mFlushingAudio = true; 119 } else { 120 mNotifyCompleteVideo |= notifyComplete; 121 if (mFlushingVideo) { 122 return; 123 } 124 mFlushingVideo = true; 125 } 126 } 127 128 sp<AMessage> msg = new AMessage(kWhatFlush, id()); 129 msg->setInt32("audio", static_cast<int32_t>(audio)); 130 msg->post(); 131} 132 133void NuPlayer::Renderer::signalTimeDiscontinuity() { 134 Mutex::Autolock autoLock(mLock); 135 // CHECK(mAudioQueue.empty()); 136 // CHECK(mVideoQueue.empty()); 137 setAudioFirstAnchorTime(-1); 138 setAnchorTime(-1, -1); 139 setVideoLateByUs(0); 140 mSyncQueues = false; 141} 142 143void NuPlayer::Renderer::signalAudioSinkChanged() { 144 (new AMessage(kWhatAudioSinkChanged, id()))->post(); 145} 146 147void NuPlayer::Renderer::signalDisableOffloadAudio() { 148 (new AMessage(kWhatDisableOffloadAudio, id()))->post(); 149} 150 151void NuPlayer::Renderer::signalEnableOffloadAudio() { 152 (new AMessage(kWhatEnableOffloadAudio, id()))->post(); 153} 154 155void NuPlayer::Renderer::pause() { 156 (new AMessage(kWhatPause, id()))->post(); 157} 158 159void NuPlayer::Renderer::resume() { 160 (new AMessage(kWhatResume, id()))->post(); 161} 162 163void NuPlayer::Renderer::setVideoFrameRate(float fps) { 164 sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id()); 165 msg->setFloat("frame-rate", fps); 166 msg->post(); 167} 168 169status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) { 170 return getCurrentPosition(mediaUs, ALooper::GetNowUs()); 171} 172 173status_t NuPlayer::Renderer::getCurrentPosition( 174 int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) { 175 Mutex::Autolock autoLock(mTimeLock); 176 if (!mHasAudio && !mHasVideo) { 177 return NO_INIT; 178 } 179 180 if (mAnchorTimeMediaUs < 
0) { 181 return NO_INIT; 182 } 183 184 int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; 185 186 if (mPauseStartedTimeRealUs != -1) { 187 positionUs -= (nowUs - mPauseStartedTimeRealUs); 188 } 189 190 // limit position to the last queued media time (for video only stream 191 // position will be discrete as we don't know how long each frame lasts) 192 if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) { 193 if (positionUs > mAnchorMaxMediaUs) { 194 positionUs = mAnchorMaxMediaUs; 195 } 196 } 197 198 if (positionUs < mAudioFirstAnchorTimeMediaUs) { 199 positionUs = mAudioFirstAnchorTimeMediaUs; 200 } 201 202 *mediaUs = (positionUs <= 0) ? 0 : positionUs; 203 return OK; 204} 205 206void NuPlayer::Renderer::setHasMedia(bool audio) { 207 Mutex::Autolock autoLock(mTimeLock); 208 if (audio) { 209 mHasAudio = true; 210 } else { 211 mHasVideo = true; 212 } 213} 214 215void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) { 216 Mutex::Autolock autoLock(mTimeLock); 217 mAudioFirstAnchorTimeMediaUs = mediaUs; 218} 219 220void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) { 221 Mutex::Autolock autoLock(mTimeLock); 222 if (mAudioFirstAnchorTimeMediaUs == -1) { 223 mAudioFirstAnchorTimeMediaUs = mediaUs; 224 } 225} 226 227void NuPlayer::Renderer::setAnchorTime( 228 int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) { 229 Mutex::Autolock autoLock(mTimeLock); 230 mAnchorTimeMediaUs = mediaUs; 231 mAnchorTimeRealUs = realUs; 232 mAnchorNumFramesWritten = numFramesWritten; 233 if (resume) { 234 mPauseStartedTimeRealUs = -1; 235 } 236} 237 238void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) { 239 Mutex::Autolock autoLock(mTimeLock); 240 mVideoLateByUs = lateUs; 241} 242 243int64_t NuPlayer::Renderer::getVideoLateByUs() { 244 Mutex::Autolock autoLock(mTimeLock); 245 return mVideoLateByUs; 246} 247 248void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) { 249 Mutex::Autolock 
autoLock(mTimeLock); 250 mPauseStartedTimeRealUs = realUs; 251} 252 253status_t NuPlayer::Renderer::openAudioSink( 254 const sp<AMessage> &format, 255 bool offloadOnly, 256 bool hasVideo, 257 uint32_t flags, 258 bool *isOffloaded) { 259 sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id()); 260 msg->setMessage("format", format); 261 msg->setInt32("offload-only", offloadOnly); 262 msg->setInt32("has-video", hasVideo); 263 msg->setInt32("flags", flags); 264 265 sp<AMessage> response; 266 msg->postAndAwaitResponse(&response); 267 268 int32_t err; 269 if (!response->findInt32("err", &err)) { 270 err = INVALID_OPERATION; 271 } else if (err == OK && isOffloaded != NULL) { 272 int32_t offload; 273 CHECK(response->findInt32("offload", &offload)); 274 *isOffloaded = (offload != 0); 275 } 276 return err; 277} 278 279void NuPlayer::Renderer::closeAudioSink() { 280 sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id()); 281 282 sp<AMessage> response; 283 msg->postAndAwaitResponse(&response); 284} 285 286void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { 287 switch (msg->what()) { 288 case kWhatOpenAudioSink: 289 { 290 sp<AMessage> format; 291 CHECK(msg->findMessage("format", &format)); 292 293 int32_t offloadOnly; 294 CHECK(msg->findInt32("offload-only", &offloadOnly)); 295 296 int32_t hasVideo; 297 CHECK(msg->findInt32("has-video", &hasVideo)); 298 299 uint32_t flags; 300 CHECK(msg->findInt32("flags", (int32_t *)&flags)); 301 302 status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags); 303 304 sp<AMessage> response = new AMessage; 305 response->setInt32("err", err); 306 response->setInt32("offload", offloadingAudio()); 307 308 uint32_t replyID; 309 CHECK(msg->senderAwaitsResponse(&replyID)); 310 response->postReply(replyID); 311 312 break; 313 } 314 315 case kWhatCloseAudioSink: 316 { 317 uint32_t replyID; 318 CHECK(msg->senderAwaitsResponse(&replyID)); 319 320 onCloseAudioSink(); 321 322 sp<AMessage> response = new AMessage; 323 
response->postReply(replyID); 324 break; 325 } 326 327 case kWhatStopAudioSink: 328 { 329 mAudioSink->stop(); 330 break; 331 } 332 333 case kWhatDrainAudioQueue: 334 { 335 int32_t generation; 336 CHECK(msg->findInt32("generation", &generation)); 337 if (generation != mAudioQueueGeneration) { 338 break; 339 } 340 341 mDrainAudioQueuePending = false; 342 343 if (onDrainAudioQueue()) { 344 uint32_t numFramesPlayed; 345 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), 346 (status_t)OK); 347 348 uint32_t numFramesPendingPlayout = 349 mNumFramesWritten - numFramesPlayed; 350 351 // This is how long the audio sink will have data to 352 // play back. 353 int64_t delayUs = 354 mAudioSink->msecsPerFrame() 355 * numFramesPendingPlayout * 1000ll; 356 357 // Let's give it more data after about half that time 358 // has elapsed. 359 // kWhatDrainAudioQueue is used for non-offloading mode, 360 // and mLock is used only for offloading mode. Therefore, 361 // no need to acquire mLock here. 362 postDrainAudioQueue_l(delayUs / 2); 363 } 364 break; 365 } 366 367 case kWhatDrainVideoQueue: 368 { 369 int32_t generation; 370 CHECK(msg->findInt32("generation", &generation)); 371 if (generation != mVideoQueueGeneration) { 372 break; 373 } 374 375 mDrainVideoQueuePending = false; 376 377 onDrainVideoQueue(); 378 379 postDrainVideoQueue(); 380 break; 381 } 382 383 case kWhatPostDrainVideoQueue: 384 { 385 int32_t generation; 386 CHECK(msg->findInt32("generation", &generation)); 387 if (generation != mVideoQueueGeneration) { 388 break; 389 } 390 391 mDrainVideoQueuePending = false; 392 postDrainVideoQueue(); 393 break; 394 } 395 396 case kWhatQueueBuffer: 397 { 398 onQueueBuffer(msg); 399 break; 400 } 401 402 case kWhatQueueEOS: 403 { 404 onQueueEOS(msg); 405 break; 406 } 407 408 case kWhatFlush: 409 { 410 onFlush(msg); 411 break; 412 } 413 414 case kWhatAudioSinkChanged: 415 { 416 onAudioSinkChanged(); 417 break; 418 } 419 420 case kWhatDisableOffloadAudio: 421 { 422 
onDisableOffloadAudio(); 423 break; 424 } 425 426 case kWhatEnableOffloadAudio: 427 { 428 onEnableOffloadAudio(); 429 break; 430 } 431 432 case kWhatPause: 433 { 434 onPause(); 435 break; 436 } 437 438 case kWhatResume: 439 { 440 onResume(); 441 break; 442 } 443 444 case kWhatSetVideoFrameRate: 445 { 446 float fps; 447 CHECK(msg->findFloat("frame-rate", &fps)); 448 onSetVideoFrameRate(fps); 449 break; 450 } 451 452 case kWhatAudioOffloadTearDown: 453 { 454 onAudioOffloadTearDown(kDueToError); 455 break; 456 } 457 458 case kWhatAudioOffloadPauseTimeout: 459 { 460 int32_t generation; 461 CHECK(msg->findInt32("generation", &generation)); 462 if (generation != mAudioOffloadPauseTimeoutGeneration) { 463 break; 464 } 465 ALOGV("Audio Offload tear down due to pause timeout."); 466 onAudioOffloadTearDown(kDueToTimeout); 467 break; 468 } 469 470 default: 471 TRESPASS(); 472 break; 473 } 474} 475 476void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) { 477 if (mDrainAudioQueuePending || mSyncQueues || mPaused 478 || offloadingAudio()) { 479 return; 480 } 481 482 if (mAudioQueue.empty()) { 483 return; 484 } 485 486 mDrainAudioQueuePending = true; 487 sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); 488 msg->setInt32("generation", mAudioQueueGeneration); 489 msg->post(delayUs); 490} 491 492void NuPlayer::Renderer::prepareForMediaRenderingStart() { 493 mAudioRenderingStartGeneration = mAudioQueueGeneration; 494 mVideoRenderingStartGeneration = mVideoQueueGeneration; 495} 496 497void NuPlayer::Renderer::notifyIfMediaRenderingStarted() { 498 if (mVideoRenderingStartGeneration == mVideoQueueGeneration && 499 mAudioRenderingStartGeneration == mAudioQueueGeneration) { 500 mVideoRenderingStartGeneration = -1; 501 mAudioRenderingStartGeneration = -1; 502 503 sp<AMessage> notify = mNotify->dup(); 504 notify->setInt32("what", kWhatMediaRenderingStart); 505 notify->post(); 506 } 507} 508 509// static 510size_t NuPlayer::Renderer::AudioSinkCallback( 511 
MediaPlayerBase::AudioSink * /* audioSink */, 512 void *buffer, 513 size_t size, 514 void *cookie, 515 MediaPlayerBase::AudioSink::cb_event_t event) { 516 NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie; 517 518 switch (event) { 519 case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER: 520 { 521 return me->fillAudioBuffer(buffer, size); 522 break; 523 } 524 525 case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END: 526 { 527 me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM); 528 break; 529 } 530 531 case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN: 532 { 533 me->notifyAudioOffloadTearDown(); 534 break; 535 } 536 } 537 538 return 0; 539} 540 541size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) { 542 Mutex::Autolock autoLock(mLock); 543 544 if (!offloadingAudio() || mPaused) { 545 return 0; 546 } 547 548 bool hasEOS = false; 549 550 size_t sizeCopied = 0; 551 bool firstEntry = true; 552 while (sizeCopied < size && !mAudioQueue.empty()) { 553 QueueEntry *entry = &*mAudioQueue.begin(); 554 555 if (entry->mBuffer == NULL) { // EOS 556 hasEOS = true; 557 mAudioQueue.erase(mAudioQueue.begin()); 558 entry = NULL; 559 break; 560 } 561 562 if (firstEntry && entry->mOffset == 0) { 563 firstEntry = false; 564 int64_t mediaTimeUs; 565 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 566 ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 567 setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); 568 } 569 570 size_t copy = entry->mBuffer->size() - entry->mOffset; 571 size_t sizeRemaining = size - sizeCopied; 572 if (copy > sizeRemaining) { 573 copy = sizeRemaining; 574 } 575 576 memcpy((char *)buffer + sizeCopied, 577 entry->mBuffer->data() + entry->mOffset, 578 copy); 579 580 entry->mOffset += copy; 581 if (entry->mOffset == entry->mBuffer->size()) { 582 entry->mNotifyConsumed->post(); 583 mAudioQueue.erase(mAudioQueue.begin()); 584 entry = NULL; 585 } 586 sizeCopied += copy; 587 notifyIfMediaRenderingStarted(); 588 } 
589 590 if (mAudioFirstAnchorTimeMediaUs >= 0) { 591 int64_t nowUs = ALooper::GetNowUs(); 592 setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs)); 593 } 594 595 // we don't know how much data we are queueing for offloaded tracks 596 mAnchorMaxMediaUs = -1; 597 598 if (hasEOS) { 599 (new AMessage(kWhatStopAudioSink, id()))->post(); 600 } 601 602 return sizeCopied; 603} 604 605bool NuPlayer::Renderer::onDrainAudioQueue() { 606 uint32_t numFramesPlayed; 607 if (mAudioSink->getPosition(&numFramesPlayed) != OK) { 608 return false; 609 } 610 611 ssize_t numFramesAvailableToWrite = 612 mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); 613 614#if 0 615 if (numFramesAvailableToWrite == mAudioSink->frameCount()) { 616 ALOGI("audio sink underrun"); 617 } else { 618 ALOGV("audio queue has %d frames left to play", 619 mAudioSink->frameCount() - numFramesAvailableToWrite); 620 } 621#endif 622 623 size_t numBytesAvailableToWrite = 624 numFramesAvailableToWrite * mAudioSink->frameSize(); 625 626 while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) { 627 QueueEntry *entry = &*mAudioQueue.begin(); 628 629 mLastAudioBufferDrained = entry->mBufferOrdinal; 630 631 if (entry->mBuffer == NULL) { 632 // EOS 633 int64_t postEOSDelayUs = 0; 634 if (mAudioSink->needsTrailingPadding()) { 635 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs()); 636 } 637 notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs); 638 639 mAudioQueue.erase(mAudioQueue.begin()); 640 entry = NULL; 641 // Need to stop the track here, because that will play out the last 642 // little bit at the end of the file. Otherwise short files won't play. 
643 mAudioSink->stop(); 644 mNumFramesWritten = 0; 645 return false; 646 } 647 648 if (entry->mOffset == 0) { 649 int64_t mediaTimeUs; 650 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 651 ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 652 onNewAudioMediaTime(mediaTimeUs); 653 } 654 655 size_t copy = entry->mBuffer->size() - entry->mOffset; 656 if (copy > numBytesAvailableToWrite) { 657 copy = numBytesAvailableToWrite; 658 } 659 660 ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy); 661 if (written < 0) { 662 // An error in AudioSink write. Perhaps the AudioSink was not properly opened. 663 ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy); 664 break; 665 } 666 667 entry->mOffset += written; 668 if (entry->mOffset == entry->mBuffer->size()) { 669 entry->mNotifyConsumed->post(); 670 mAudioQueue.erase(mAudioQueue.begin()); 671 672 entry = NULL; 673 } 674 675 numBytesAvailableToWrite -= written; 676 size_t copiedFrames = written / mAudioSink->frameSize(); 677 mNumFramesWritten += copiedFrames; 678 679 notifyIfMediaRenderingStarted(); 680 681 if (written != (ssize_t)copy) { 682 // A short count was received from AudioSink::write() 683 // 684 // AudioSink write should block until exactly the number of bytes are delivered. 685 // But it may return with a short count (without an error) when: 686 // 687 // 1) Size to be copied is not a multiple of the frame size. We consider this fatal. 688 // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded. 689 690 // (Case 1) 691 // Must be a multiple of the frame size. If it is not a multiple of a frame size, it 692 // needs to fail, as we should not carry over fractional frames between calls. 693 CHECK_EQ(copy % mAudioSink->frameSize(), 0); 694 695 // (Case 2) 696 // Return early to the caller. 697 // Beware of calling immediately again as this may busy-loop if you are not careful. 
698 ALOGW("AudioSink write short frame count %zd < %zu", written, copy); 699 break; 700 } 701 } 702 mAnchorMaxMediaUs = 703 mAnchorTimeMediaUs + 704 (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL) 705 * 1000LL * mAudioSink->msecsPerFrame()); 706 707 return !mAudioQueue.empty(); 708} 709 710int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) { 711 int64_t writtenAudioDurationUs = 712 mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame(); 713 return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs); 714} 715 716int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) { 717 int64_t currentPositionUs; 718 if (getCurrentPosition(¤tPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) { 719 // If failed to get current position, e.g. due to audio clock is not ready, then just 720 // play out video immediately without delay. 721 return nowUs; 722 } 723 return (mediaTimeUs - currentPositionUs) + nowUs; 724} 725 726void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) { 727 // TRICKY: vorbis decoder generates multiple frames with the same 728 // timestamp, so only update on the first frame with a given timestamp 729 if (mediaTimeUs == mAnchorTimeMediaUs) { 730 return; 731 } 732 setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); 733 int64_t nowUs = ALooper::GetNowUs(); 734 setAnchorTime( 735 mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten); 736} 737 738void NuPlayer::Renderer::postDrainVideoQueue() { 739 if (mDrainVideoQueuePending 740 || mSyncQueues 741 || (mPaused && mVideoSampleReceived)) { 742 return; 743 } 744 745 if (mVideoQueue.empty()) { 746 return; 747 } 748 749 QueueEntry &entry = *mVideoQueue.begin(); 750 751 sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); 752 msg->setInt32("generation", mVideoQueueGeneration); 753 754 if (entry.mBuffer == NULL) { 755 // EOS doesn't carry a timestamp. 
756 msg->post(); 757 mDrainVideoQueuePending = true; 758 return; 759 } 760 761 int64_t delayUs; 762 int64_t nowUs = ALooper::GetNowUs(); 763 int64_t realTimeUs; 764 if (mFlags & FLAG_REAL_TIME) { 765 int64_t mediaTimeUs; 766 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 767 realTimeUs = mediaTimeUs; 768 } else { 769 int64_t mediaTimeUs; 770 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 771 772 if (mAnchorTimeMediaUs < 0) { 773 setAnchorTime(mediaTimeUs, nowUs); 774 mAnchorMaxMediaUs = mediaTimeUs; 775 realTimeUs = nowUs; 776 } else { 777 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); 778 } 779 if (!mHasAudio) { 780 mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps 781 } 782 783 // Heuristics to handle situation when media time changed without a 784 // discontinuity. If we have not drained an audio buffer that was 785 // received after this buffer, repost in 10 msec. Otherwise repost 786 // in 500 msec. 787 delayUs = realTimeUs - nowUs; 788 if (delayUs > 500000) { 789 int64_t postDelayUs = 500000; 790 if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) { 791 postDelayUs = 10000; 792 } 793 msg->setWhat(kWhatPostDrainVideoQueue); 794 msg->post(postDelayUs); 795 mVideoScheduler->restart(); 796 ALOGI("possible video time jump of %dms, retrying in %dms", 797 (int)(delayUs / 1000), (int)(postDelayUs / 1000)); 798 mDrainVideoQueuePending = true; 799 return; 800 } 801 } 802 803 realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000; 804 int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000); 805 806 delayUs = realTimeUs - nowUs; 807 808 ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs); 809 // post 2 display refreshes before rendering is due 810 msg->post(delayUs > twoVsyncsUs ? 
delayUs - twoVsyncsUs : 0); 811 812 mDrainVideoQueuePending = true; 813} 814 815void NuPlayer::Renderer::onDrainVideoQueue() { 816 if (mVideoQueue.empty()) { 817 return; 818 } 819 820 QueueEntry *entry = &*mVideoQueue.begin(); 821 822 if (entry->mBuffer == NULL) { 823 // EOS 824 825 notifyEOS(false /* audio */, entry->mFinalResult); 826 827 mVideoQueue.erase(mVideoQueue.begin()); 828 entry = NULL; 829 830 setVideoLateByUs(0); 831 return; 832 } 833 834 int64_t nowUs = -1; 835 int64_t realTimeUs; 836 if (mFlags & FLAG_REAL_TIME) { 837 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); 838 } else { 839 int64_t mediaTimeUs; 840 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 841 842 nowUs = ALooper::GetNowUs(); 843 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); 844 } 845 846 bool tooLate = false; 847 848 if (!mPaused) { 849 if (nowUs == -1) { 850 nowUs = ALooper::GetNowUs(); 851 } 852 setVideoLateByUs(nowUs - realTimeUs); 853 tooLate = (mVideoLateByUs > 40000); 854 855 if (tooLate) { 856 ALOGV("video late by %lld us (%.2f secs)", 857 mVideoLateByUs, mVideoLateByUs / 1E6); 858 } else { 859 ALOGV("rendering video at media time %.2f secs", 860 (mFlags & FLAG_REAL_TIME ? realTimeUs : 861 (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); 862 } 863 } else { 864 setVideoLateByUs(0); 865 if (!mVideoSampleReceived && !mHasAudio) { 866 // This will ensure that the first frame after a flush won't be used as anchor 867 // when renderer is in paused state, because resume can happen any time after seek. 
868 setAnchorTime(-1, -1); 869 } 870 } 871 872 entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll); 873 entry->mNotifyConsumed->setInt32("render", !tooLate); 874 entry->mNotifyConsumed->post(); 875 mVideoQueue.erase(mVideoQueue.begin()); 876 entry = NULL; 877 878 mVideoSampleReceived = true; 879 880 if (!mPaused) { 881 if (!mVideoRenderingStarted) { 882 mVideoRenderingStarted = true; 883 notifyVideoRenderingStart(); 884 } 885 notifyIfMediaRenderingStarted(); 886 } 887} 888 889void NuPlayer::Renderer::notifyVideoRenderingStart() { 890 sp<AMessage> notify = mNotify->dup(); 891 notify->setInt32("what", kWhatVideoRenderingStart); 892 notify->post(); 893} 894 895void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) { 896 sp<AMessage> notify = mNotify->dup(); 897 notify->setInt32("what", kWhatEOS); 898 notify->setInt32("audio", static_cast<int32_t>(audio)); 899 notify->setInt32("finalResult", finalResult); 900 notify->post(delayUs); 901} 902 903void NuPlayer::Renderer::notifyAudioOffloadTearDown() { 904 (new AMessage(kWhatAudioOffloadTearDown, id()))->post(); 905} 906 907void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { 908 int32_t audio; 909 CHECK(msg->findInt32("audio", &audio)); 910 911 setHasMedia(audio); 912 913 if (mHasVideo) { 914 if (mVideoScheduler == NULL) { 915 mVideoScheduler = new VideoFrameScheduler(); 916 mVideoScheduler->init(); 917 } 918 } 919 920 if (dropBufferWhileFlushing(audio, msg)) { 921 return; 922 } 923 924 sp<ABuffer> buffer; 925 CHECK(msg->findBuffer("buffer", &buffer)); 926 927 sp<AMessage> notifyConsumed; 928 CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed)); 929 930 QueueEntry entry; 931 entry.mBuffer = buffer; 932 entry.mNotifyConsumed = notifyConsumed; 933 entry.mOffset = 0; 934 entry.mFinalResult = OK; 935 entry.mBufferOrdinal = ++mTotalBuffersQueued; 936 937 if (audio) { 938 Mutex::Autolock autoLock(mLock); 939 mAudioQueue.push_back(entry); 940 
postDrainAudioQueue_l(); 941 } else { 942 mVideoQueue.push_back(entry); 943 postDrainVideoQueue(); 944 } 945 946 Mutex::Autolock autoLock(mLock); 947 if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) { 948 return; 949 } 950 951 sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer; 952 sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer; 953 954 if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) { 955 // EOS signalled on either queue. 956 syncQueuesDone_l(); 957 return; 958 } 959 960 int64_t firstAudioTimeUs; 961 int64_t firstVideoTimeUs; 962 CHECK(firstAudioBuffer->meta() 963 ->findInt64("timeUs", &firstAudioTimeUs)); 964 CHECK(firstVideoBuffer->meta() 965 ->findInt64("timeUs", &firstVideoTimeUs)); 966 967 int64_t diff = firstVideoTimeUs - firstAudioTimeUs; 968 969 ALOGV("queueDiff = %.2f secs", diff / 1E6); 970 971 if (diff > 100000ll) { 972 // Audio data starts More than 0.1 secs before video. 973 // Drop some audio. 974 975 (*mAudioQueue.begin()).mNotifyConsumed->post(); 976 mAudioQueue.erase(mAudioQueue.begin()); 977 return; 978 } 979 980 syncQueuesDone_l(); 981} 982 983void NuPlayer::Renderer::syncQueuesDone_l() { 984 if (!mSyncQueues) { 985 return; 986 } 987 988 mSyncQueues = false; 989 990 if (!mAudioQueue.empty()) { 991 postDrainAudioQueue_l(); 992 } 993 994 if (!mVideoQueue.empty()) { 995 postDrainVideoQueue(); 996 } 997} 998 999void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { 1000 int32_t audio; 1001 CHECK(msg->findInt32("audio", &audio)); 1002 1003 if (dropBufferWhileFlushing(audio, msg)) { 1004 return; 1005 } 1006 1007 int32_t finalResult; 1008 CHECK(msg->findInt32("finalResult", &finalResult)); 1009 1010 QueueEntry entry; 1011 entry.mOffset = 0; 1012 entry.mFinalResult = finalResult; 1013 1014 if (audio) { 1015 Mutex::Autolock autoLock(mLock); 1016 if (mAudioQueue.empty() && mSyncQueues) { 1017 syncQueuesDone_l(); 1018 } 1019 mAudioQueue.push_back(entry); 1020 postDrainAudioQueue_l(); 1021 } else { 
1022 if (mVideoQueue.empty() && mSyncQueues) { 1023 Mutex::Autolock autoLock(mLock); 1024 syncQueuesDone_l(); 1025 } 1026 mVideoQueue.push_back(entry); 1027 postDrainVideoQueue(); 1028 } 1029} 1030 1031void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { 1032 int32_t audio, notifyComplete; 1033 CHECK(msg->findInt32("audio", &audio)); 1034 1035 { 1036 Mutex::Autolock autoLock(mFlushLock); 1037 if (audio) { 1038 mFlushingAudio = false; 1039 notifyComplete = mNotifyCompleteAudio; 1040 mNotifyCompleteAudio = false; 1041 } else { 1042 mFlushingVideo = false; 1043 notifyComplete = mNotifyCompleteVideo; 1044 mNotifyCompleteVideo = false; 1045 } 1046 } 1047 1048 // If we're currently syncing the queues, i.e. dropping audio while 1049 // aligning the first audio/video buffer times and only one of the 1050 // two queues has data, we may starve that queue by not requesting 1051 // more buffers from the decoder. If the other source then encounters 1052 // a discontinuity that leads to flushing, we'll never find the 1053 // corresponding discontinuity on the other queue. 1054 // Therefore we'll stop syncing the queues if at least one of them 1055 // is flushed. 1056 { 1057 Mutex::Autolock autoLock(mLock); 1058 syncQueuesDone_l(); 1059 setPauseStartedTimeRealUs(-1); 1060 setAnchorTime(-1, -1); 1061 } 1062 1063 ALOGV("flushing %s", audio ? 
"audio" : "video"); 1064 if (audio) { 1065 { 1066 Mutex::Autolock autoLock(mLock); 1067 flushQueue(&mAudioQueue); 1068 1069 ++mAudioQueueGeneration; 1070 prepareForMediaRenderingStart(); 1071 1072 if (offloadingAudio()) { 1073 setAudioFirstAnchorTime(-1); 1074 } 1075 } 1076 1077 mDrainAudioQueuePending = false; 1078 1079 if (offloadingAudio()) { 1080 mAudioSink->pause(); 1081 mAudioSink->flush(); 1082 mAudioSink->start(); 1083 } 1084 } else { 1085 flushQueue(&mVideoQueue); 1086 1087 mDrainVideoQueuePending = false; 1088 ++mVideoQueueGeneration; 1089 1090 if (mVideoScheduler != NULL) { 1091 mVideoScheduler->restart(); 1092 } 1093 1094 prepareForMediaRenderingStart(); 1095 } 1096 1097 mVideoSampleReceived = false; 1098 1099 if (notifyComplete) { 1100 notifyFlushComplete(audio); 1101 } 1102} 1103 1104void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) { 1105 while (!queue->empty()) { 1106 QueueEntry *entry = &*queue->begin(); 1107 1108 if (entry->mBuffer != NULL) { 1109 entry->mNotifyConsumed->post(); 1110 } 1111 1112 queue->erase(queue->begin()); 1113 entry = NULL; 1114 } 1115} 1116 1117void NuPlayer::Renderer::notifyFlushComplete(bool audio) { 1118 sp<AMessage> notify = mNotify->dup(); 1119 notify->setInt32("what", kWhatFlushComplete); 1120 notify->setInt32("audio", static_cast<int32_t>(audio)); 1121 notify->post(); 1122} 1123 1124bool NuPlayer::Renderer::dropBufferWhileFlushing( 1125 bool audio, const sp<AMessage> &msg) { 1126 bool flushing = false; 1127 1128 { 1129 Mutex::Autolock autoLock(mFlushLock); 1130 if (audio) { 1131 flushing = mFlushingAudio; 1132 } else { 1133 flushing = mFlushingVideo; 1134 } 1135 } 1136 1137 if (!flushing) { 1138 return false; 1139 } 1140 1141 sp<AMessage> notifyConsumed; 1142 if (msg->findMessage("notifyConsumed", ¬ifyConsumed)) { 1143 notifyConsumed->post(); 1144 } 1145 1146 return true; 1147} 1148 1149void NuPlayer::Renderer::onAudioSinkChanged() { 1150 if (offloadingAudio()) { 1151 return; 1152 } 1153 
CHECK(!mDrainAudioQueuePending); 1154 mNumFramesWritten = 0; 1155 mAnchorNumFramesWritten = -1; 1156 uint32_t written; 1157 if (mAudioSink->getFramesWritten(&written) == OK) { 1158 mNumFramesWritten = written; 1159 } 1160} 1161 1162void NuPlayer::Renderer::onDisableOffloadAudio() { 1163 Mutex::Autolock autoLock(mLock); 1164 mFlags &= ~FLAG_OFFLOAD_AUDIO; 1165 ++mAudioQueueGeneration; 1166} 1167 1168void NuPlayer::Renderer::onEnableOffloadAudio() { 1169 Mutex::Autolock autoLock(mLock); 1170 mFlags |= FLAG_OFFLOAD_AUDIO; 1171 ++mAudioQueueGeneration; 1172} 1173 1174void NuPlayer::Renderer::onPause() { 1175 if (mPaused) { 1176 ALOGW("Renderer::onPause() called while already paused!"); 1177 return; 1178 } 1179 { 1180 Mutex::Autolock autoLock(mLock); 1181 ++mAudioQueueGeneration; 1182 ++mVideoQueueGeneration; 1183 prepareForMediaRenderingStart(); 1184 mPaused = true; 1185 setPauseStartedTimeRealUs(ALooper::GetNowUs()); 1186 } 1187 1188 mDrainAudioQueuePending = false; 1189 mDrainVideoQueuePending = false; 1190 1191 if (mHasAudio) { 1192 mAudioSink->pause(); 1193 startAudioOffloadPauseTimeout(); 1194 } 1195 1196 ALOGV("now paused audio queue has %d entries, video has %d entries", 1197 mAudioQueue.size(), mVideoQueue.size()); 1198} 1199 1200void NuPlayer::Renderer::onResume() { 1201 if (!mPaused) { 1202 return; 1203 } 1204 1205 if (mHasAudio) { 1206 cancelAudioOffloadPauseTimeout(); 1207 mAudioSink->start(); 1208 } 1209 1210 Mutex::Autolock autoLock(mLock); 1211 mPaused = false; 1212 if (mPauseStartedTimeRealUs != -1) { 1213 int64_t newAnchorRealUs = 1214 mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs; 1215 setAnchorTime( 1216 mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */); 1217 } 1218 1219 if (!mAudioQueue.empty()) { 1220 postDrainAudioQueue_l(); 1221 } 1222 1223 if (!mVideoQueue.empty()) { 1224 postDrainVideoQueue(); 1225 } 1226} 1227 1228void NuPlayer::Renderer::onSetVideoFrameRate(float fps) { 1229 if 
(mVideoScheduler == NULL) {
        // Lazily create the scheduler the first time a rate is set.
        mVideoScheduler = new VideoFrameScheduler();
    }
    mVideoScheduler->init(fps);
}

// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
// Returns the duration (in us) of audio the sink has actually played out,
// referenced to |nowUs|: frames-played converted to us, plus the elapsed
// time since the sink reported that frame count. Prefers getTimestamp(),
// falling back to getPosition() with a latency-based correction.
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        // Convert the timestamp's timespec to microseconds.
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            // Clamp the age so a stale timestamp cannot inflate the result.
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //      numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        // getPosition() reports the server-side position; offset by half the
        // sink latency as a rough estimate of what has reached the output.
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}

// Tears down the offloaded audio path: captures the current playback
// position, stops and flushes the sink, and notifies the listener with the
// position and |reason|. Idempotent via mAudioOffloadTornDown.
void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
    if (mAudioOffloadTornDown) {
        return;
    }
    mAudioOffloadTornDown = true;

    int64_t currentPositionUs;
    if (getCurrentPosition(&currentPositionUs) != OK) {
        currentPositionUs = 0;
    }

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->setInt32("reason", reason);
    notify->post();
}

// Arms the pause timeout (kOffloadPauseMaxUs delay) that lets the audio DSP
// power down after a long pause. Only meaningful in offload mode; the
// generation tag lets cancelAudioOffloadPauseTimeout() invalidate it.
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
        msg->post(kOffloadPauseMaxUs);
    }
}

// Disarms a pending pause timeout by bumping the generation, making the
// already-posted message stale when it arrives.
void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        ++mAudioOffloadPauseTimeoutGeneration;
    }
}

// Opens (or re-opens) the AudioSink for |format|, trying offload mode first
// when offloading is enabled, and falling back to a 16-bit PCM open unless
// |offloadOnly| is set. Returns OK on success or the sink's open error.
status_t NuPlayer::Renderer::onOpenAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
            offloadOnly, offloadingAudio());
    bool audioSinkChanged = false;

    int32_t numChannels;
    CHECK(format->findInt32("channel-count", &numChannels));

    int32_t channelMask;
    if (!format->findInt32("channel-mask", &channelMask)) {
        // signal to the AudioSink to derive the mask from count.
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    int32_t sampleRate;
    CHECK(format->findInt32("sample-rate", &sampleRate));

    if (offloadingAudio()) {
        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
        AString mime;
        CHECK(format->findString("mime", &mime));
        // Translate the container mime type into an audio_format_t the
        // offload path can consume.
        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());

        if (err != OK) {
            ALOGE("Couldn't map mime \"%s\" to a valid "
                    "audio_format", mime.c_str());
            // Unknown format: give up on offload; the PCM branch below
            // will handle the open instead.
            onDisableOffloadAudio();
        } else {
            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                    mime.c_str(), audioFormat);

            int avgBitRate = -1;
            format->findInt32("bit-rate", &avgBitRate);

            int32_t aacProfile = -1;
            if (audioFormat == AUDIO_FORMAT_AAC
                    && format->findInt32("aac-profile", &aacProfile)) {
                // Redefine AAC format as per aac profile
                mapAACProfileToAudioFormat(
                        audioFormat,
                        aacProfile);
            }

            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.duration_us = -1;
            format->findInt64(
                    "durationUs", &offloadInfo.duration_us);
            offloadInfo.sample_rate = sampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = hasVideo;
            offloadInfo.is_streaming = true;

            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                ALOGV("openAudioSink: no change in offload mode");
                // no change from previous configuration, everything ok.
                return OK;
            }
            ALOGV("openAudioSink: try to open AudioSink in offload mode");
            uint32_t offloadFlags = flags;
            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
            audioSinkChanged = true;
            mAudioSink->close();
            err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    audioFormat,
                    8 /* bufferCount */,
                    &NuPlayer::Renderer::AudioSinkCallback,
                    this,
                    (audio_output_flags_t)offloadFlags,
                    &offloadInfo);

            if (err == OK) {
                // If the playback is offloaded to h/w, we pass
                // the HAL some metadata information.
                // We don't want to do this for PCM because it
                // will be going through the AudioFlinger mixer
                // before reaching the hardware.
                // TODO
                mCurrentOffloadInfo = offloadInfo;
                err = mAudioSink->start();
                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
            }
            if (err != OK) {
                // Clean up, fall back to non offload mode.
                mAudioSink->close();
                onDisableOffloadAudio();
                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                ALOGV("openAudioSink: offload failed");
            }
        }
    }
    // Non-offload (PCM) open: reached when offload is disabled, the mime
    // could not be mapped, or the offload open above failed.
    if (!offloadOnly && !offloadingAudio()) {
        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
        uint32_t pcmFlags = flags;
        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
        audioSinkChanged = true;
        mAudioSink->close();
        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
        // No callback/cookie in PCM mode, unlike the offload open above.
        status_t err = mAudioSink->open(
                sampleRate,
                numChannels,
                (audio_channel_mask_t)channelMask,
                AUDIO_FORMAT_PCM_16_BIT,
                8 /* bufferCount */,
                NULL,
                NULL,
                (audio_output_flags_t)pcmFlags);
        if (err != OK) {
            ALOGW("openAudioSink: non offloaded open failed status: %d", err);
            return err;
        }
        // NOTE(review): start()'s return value is ignored here, unlike the
        // offload path which checks it — confirm this is intentional.
        mAudioSink->start();
    }
    if (audioSinkChanged) {
        // Re-sync frames-written bookkeeping with the new sink.
        onAudioSinkChanged();
    }
    if (offloadingAudio()) {
        // A fresh offload open resets the torn-down latch so a future
        // teardown can be reported again.
        mAudioOffloadTornDown = false;
    }
    return OK;
}

// Closes the AudioSink and forgets the cached offload configuration so the
// next open is not treated as a no-op.
void NuPlayer::Renderer::onCloseAudioSink() {
    mAudioSink->close();
    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
}

}  // namespace android