NuPlayerRenderer.cpp revision f83408b41bbd796b7923d719e7e3799ddc7acaff
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <media/stagefright/foundation/ABuffer.h> 24#include <media/stagefright/foundation/ADebug.h> 25#include <media/stagefright/foundation/AMessage.h> 26#include <media/stagefright/foundation/AUtils.h> 27#include <media/stagefright/MediaErrors.h> 28#include <media/stagefright/MetaData.h> 29#include <media/stagefright/Utils.h> 30 31#include <VideoFrameScheduler.h> 32 33#include <inttypes.h> 34 35namespace android { 36 37// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink 38// is closed to allow the audio DSP to power down. 
static const int64_t kOffloadPauseMaxUs = 10000000ll;

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// Constructor: caches the audio sink / notify target and resets all timing
// state. Anchor times start at -1, meaning "no anchor established yet".
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mAnchorNumFramesWritten(-1),
      mAnchorMaxMediaUs(-1),
      mVideoLateByUs(0ll),
      mHasAudio(false),
      mHasVideo(false),
      mPauseStartedTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0) {
}

// Destructor: when offloading, the AudioSink may still be open; shut it
// down so the audio DSP can power off.
NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

// Queues a decoded buffer for rendering. Asynchronous: the real work runs
// on the renderer's looper thread in onQueueBuffer(). notifyConsumed is
// posted when the buffer can be returned to the decoder.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Queues an end-of-stream marker. finalResult must be a real error code
// (ERROR_END_OF_STREAM for a normal EOS); OK is rejected by the CHECK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Initiates an asynchronous flush of one queue. The mFlushing* flag is set
// here, on the caller's thread, so that buffers queued from now on are
// dropped (see dropBufferWhileFlushing) until onFlush() clears it; a
// repeated flush request for the same stream is a no-op.
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets all anchor/lateness state after a time discontinuity (e.g. seek).
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    setAudioFirstAnchorTime(-1);
    setAnchorTime(-1, -1);
    setVideoLateByUs(0);
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
    msg->setFloat("frame-rate", fps);
    msg->post();
}

status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
    return getCurrentPosition(mediaUs, ALooper::GetNowUs());
}

// Extrapolates the current media position from the (media time, real time)
// anchor pair. Returns NO_INIT until media has been queued and an anchor
// established. While paused, elapsed pause time is subtracted so the
// reported position appears frozen.
status_t NuPlayer::Renderer::getCurrentPosition(
        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
    Mutex::Autolock autoLock(mTimeLock);
    if (!mHasAudio && !mHasVideo) {
        return NO_INIT;
    }

    if (mAnchorTimeMediaUs < 0) {
        return NO_INIT;
    }

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    if (mPauseStartedTimeRealUs != -1) {
        positionUs -= (nowUs - mPauseStartedTimeRealUs);
    }

    // limit position to the last queued media time (for video only stream
    // position will be discrete as we don't know how long each frame lasts)
    if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
        if (positionUs > mAnchorMaxMediaUs) {
            positionUs = mAnchorMaxMediaUs;
        }
    }

    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
        positionUs = mAudioFirstAnchorTimeMediaUs;
    }

    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
    return OK;
}
// Records that at least one buffer of the given stream type has been
// queued; getCurrentPosition() uses this to decide a position exists.
void NuPlayer::Renderer::setHasMedia(bool audio) {
    Mutex::Autolock autoLock(mTimeLock);
    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }
}

void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mAudioFirstAnchorTimeMediaUs = mediaUs;
}

// Latches the media time of the first audio buffer; -1 means "unset".
void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
    }
}

// Updates the (media time, real time) anchor pair used to extrapolate the
// playback position. 'resume' additionally clears the pause start marker.
void NuPlayer::Renderer::setAnchorTime(
        int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
    Mutex::Autolock autoLock(mTimeLock);
    mAnchorTimeMediaUs = mediaUs;
    mAnchorTimeRealUs = realUs;
    mAnchorNumFramesWritten = numFramesWritten;
    if (resume) {
        mPauseStartedTimeRealUs = -1;
    }
}

void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mTimeLock);
    return mVideoLateByUs;
}

void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mPauseStartedTimeRealUs = realUs;
}

// Synchronously (re)opens the audio sink on the renderer thread.
// Returns true if the sink was opened in offload mode.
bool NuPlayer::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
    msg->setMessage("format", format);
    msg->setInt32("offload-only", offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);

    int32_t offload;
    CHECK(response->findInt32("offload", &offload));
    return (offload != 0);
}
// Synchronously closes the audio sink on the renderer thread.
void NuPlayer::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

// Looper callback: dispatches every renderer message to its on*() handler.
// All queue mutation (outside offload mode) happens here, on the renderer's
// single looper thread.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

            sp<AMessage> response = new AMessage;
            response->setInt32("offload", offload);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                // Stale message posted before a flush/pause bumped the
                // generation; ignore it.
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules a kWhatDrainAudioQueue message (at most one outstanding) for
// the non-offload path. _l suffix: in offload mode callers hold mLock; the
// non-offload looper path may call it unlocked (see onMessageReceived).
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

// Arms notifyIfMediaRenderingStarted() for the current queue generations.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Fires kWhatMediaRenderingStart exactly once per prepare cycle (the -1
// assignment disarms it) once both streams have rendered output.
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
// AudioSink callback used in offload mode; invoked from the audio
// callback thread, not the renderer looper.
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}
// Offload-mode fill callback: copies queued audio into the sink's buffer.
// Runs on the audio callback thread, hence the mLock around queue access.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entire buffer consumed; return it to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (mAudioFirstAnchorTimeMediaUs >= 0) {
        int64_t nowUs = ALooper::GetNowUs();
        setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
    }

    // we don't know how much data we are queueing for offloaded tracks
    mAnchorMaxMediaUs = -1;

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Non-offload drain: writes as much queued audio into the AudioSink as it
// can currently accept. Returns true if data remains to be written later.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            // Need to stop the track here, because that will play out the last
            // little bit at the end of the file. Otherwise short files won't play.
            mAudioSink->stop();
            mNumFramesWritten = 0;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            //     1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            //     2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    mAnchorMaxMediaUs =
        mAnchorTimeMediaUs +
                (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
                        * 1000LL * mAudioSink->msecsPerFrame());

    return !mAudioQueue.empty();
}

// Duration of audio written to the sink but not yet played out.
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

// Maps a media timestamp to the real (system) time at which it should be
// rendered, based on the extrapolated current position.
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t currentPositionUs;
    if (getCurrentPosition(&currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
        // If failed to get current position, e.g. due to audio clock is not ready, then just
        // play out video immediately without delay.
        return nowUs;
    }
    return (mediaTimeUs - currentPositionUs) + nowUs;
}

// Re-anchors the clock on each new audio timestamp (non-offload path).
void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
    int64_t nowUs = ALooper::GetNowUs();
    setAnchorTime(
            mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
}

// Schedules the next kWhatDrainVideoQueue so the head frame is handled
// about two vsyncs before its target display time.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            // No clock yet (e.g. video-only before first audio): anchor on
            // this frame and render it immediately.
            setAnchorTime(mediaTimeUs, nowUs);
            realTimeUs = nowUs;
        } else {
            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
        }
        if (!mHasAudio) {
            mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
        }

        // Heuristics to handle situation when media time changed without a
        // discontinuity. If we have not drained an audio buffer that was
        // received after this buffer, repost in 10 msec. Otherwise repost
        // in 500 msec.
        delayUs = realTimeUs - nowUs;
        if (delayUs > 500000) {
            int64_t postDelayUs = 500000;
            if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
                postDelayUs = 10000;
            }
            msg->setWhat(kWhatPostDrainVideoQueue);
            msg->post(postDelayUs);
            mVideoScheduler->restart();
            ALOGI("possible video time jump of %dms, retrying in %dms",
                    (int)(delayUs / 1000), (int)(postDelayUs / 1000));
            mDrainVideoQueuePending = true;
            return;
        }
    }

    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}
delayUs - twoVsyncsUs : 0); 787 788 mDrainVideoQueuePending = true; 789} 790 791void NuPlayer::Renderer::onDrainVideoQueue() { 792 if (mVideoQueue.empty()) { 793 return; 794 } 795 796 QueueEntry *entry = &*mVideoQueue.begin(); 797 798 if (entry->mBuffer == NULL) { 799 // EOS 800 801 notifyEOS(false /* audio */, entry->mFinalResult); 802 803 mVideoQueue.erase(mVideoQueue.begin()); 804 entry = NULL; 805 806 setVideoLateByUs(0); 807 return; 808 } 809 810 int64_t nowUs = -1; 811 int64_t realTimeUs; 812 if (mFlags & FLAG_REAL_TIME) { 813 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); 814 } else { 815 int64_t mediaTimeUs; 816 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 817 818 nowUs = ALooper::GetNowUs(); 819 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); 820 } 821 822 bool tooLate = false; 823 824 if (!mPaused) { 825 if (nowUs == -1) { 826 nowUs = ALooper::GetNowUs(); 827 } 828 setVideoLateByUs(nowUs - realTimeUs); 829 tooLate = (mVideoLateByUs > 40000); 830 831 if (tooLate) { 832 ALOGV("video late by %lld us (%.2f secs)", 833 mVideoLateByUs, mVideoLateByUs / 1E6); 834 } else { 835 ALOGV("rendering video at media time %.2f secs", 836 (mFlags & FLAG_REAL_TIME ? realTimeUs : 837 (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); 838 } 839 } else { 840 setVideoLateByUs(0); 841 if (!mVideoSampleReceived && !mHasAudio) { 842 // This will ensure that the first frame after a flush won't be used as anchor 843 // when renderer is in paused state, because resume can happen any time after seek. 
844 setAnchorTime(-1, -1); 845 } 846 } 847 848 entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll); 849 entry->mNotifyConsumed->setInt32("render", !tooLate); 850 entry->mNotifyConsumed->post(); 851 mVideoQueue.erase(mVideoQueue.begin()); 852 entry = NULL; 853 854 mVideoSampleReceived = true; 855 856 if (!mPaused) { 857 if (!mVideoRenderingStarted) { 858 mVideoRenderingStarted = true; 859 notifyVideoRenderingStart(); 860 } 861 notifyIfMediaRenderingStarted(); 862 } 863} 864 865void NuPlayer::Renderer::notifyVideoRenderingStart() { 866 sp<AMessage> notify = mNotify->dup(); 867 notify->setInt32("what", kWhatVideoRenderingStart); 868 notify->post(); 869} 870 871void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) { 872 sp<AMessage> notify = mNotify->dup(); 873 notify->setInt32("what", kWhatEOS); 874 notify->setInt32("audio", static_cast<int32_t>(audio)); 875 notify->setInt32("finalResult", finalResult); 876 notify->post(delayUs); 877} 878 879void NuPlayer::Renderer::notifyAudioOffloadTearDown() { 880 (new AMessage(kWhatAudioOffloadTearDown, id()))->post(); 881} 882 883void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { 884 int32_t audio; 885 CHECK(msg->findInt32("audio", &audio)); 886 887 setHasMedia(audio); 888 889 if (mHasVideo) { 890 if (mVideoScheduler == NULL) { 891 mVideoScheduler = new VideoFrameScheduler(); 892 mVideoScheduler->init(); 893 } 894 } 895 896 if (dropBufferWhileFlushing(audio, msg)) { 897 return; 898 } 899 900 sp<ABuffer> buffer; 901 CHECK(msg->findBuffer("buffer", &buffer)); 902 903 sp<AMessage> notifyConsumed; 904 CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed)); 905 906 QueueEntry entry; 907 entry.mBuffer = buffer; 908 entry.mNotifyConsumed = notifyConsumed; 909 entry.mOffset = 0; 910 entry.mFinalResult = OK; 911 entry.mBufferOrdinal = ++mTotalBuffersQueued; 912 913 if (audio) { 914 Mutex::Autolock autoLock(mLock); 915 mAudioQueue.push_back(entry); 916 
postDrainAudioQueue_l(); 917 } else { 918 mVideoQueue.push_back(entry); 919 postDrainVideoQueue(); 920 } 921 922 Mutex::Autolock autoLock(mLock); 923 if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) { 924 return; 925 } 926 927 sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer; 928 sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer; 929 930 if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) { 931 // EOS signalled on either queue. 932 syncQueuesDone_l(); 933 return; 934 } 935 936 int64_t firstAudioTimeUs; 937 int64_t firstVideoTimeUs; 938 CHECK(firstAudioBuffer->meta() 939 ->findInt64("timeUs", &firstAudioTimeUs)); 940 CHECK(firstVideoBuffer->meta() 941 ->findInt64("timeUs", &firstVideoTimeUs)); 942 943 int64_t diff = firstVideoTimeUs - firstAudioTimeUs; 944 945 ALOGV("queueDiff = %.2f secs", diff / 1E6); 946 947 if (diff > 100000ll) { 948 // Audio data starts More than 0.1 secs before video. 949 // Drop some audio. 950 951 (*mAudioQueue.begin()).mNotifyConsumed->post(); 952 mAudioQueue.erase(mAudioQueue.begin()); 953 return; 954 } 955 956 syncQueuesDone_l(); 957} 958 959void NuPlayer::Renderer::syncQueuesDone_l() { 960 if (!mSyncQueues) { 961 return; 962 } 963 964 mSyncQueues = false; 965 966 if (!mAudioQueue.empty()) { 967 postDrainAudioQueue_l(); 968 } 969 970 if (!mVideoQueue.empty()) { 971 postDrainVideoQueue(); 972 } 973} 974 975void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { 976 int32_t audio; 977 CHECK(msg->findInt32("audio", &audio)); 978 979 if (dropBufferWhileFlushing(audio, msg)) { 980 return; 981 } 982 983 int32_t finalResult; 984 CHECK(msg->findInt32("finalResult", &finalResult)); 985 986 QueueEntry entry; 987 entry.mOffset = 0; 988 entry.mFinalResult = finalResult; 989 990 if (audio) { 991 Mutex::Autolock autoLock(mLock); 992 if (mAudioQueue.empty() && mSyncQueues) { 993 syncQueuesDone_l(); 994 } 995 mAudioQueue.push_back(entry); 996 postDrainAudioQueue_l(); 997 } else { 998 if 
// Handles kWhatFlush: returns all queued buffers for one stream, bumps the
// queue generation (invalidating pending drain messages) and resets timing
// state, then reports kWhatFlushComplete.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
        Mutex::Autolock autoLock(mLock);
        syncQueuesDone_l();
        setPauseStartedTimeRealUs(-1);
        setAnchorTime(-1, -1);
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                setAudioFirstAnchorTime(-1);
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}

// Drops every entry in the queue, posting each buffer's consumed
// notification so the decoder gets its buffers back.
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Returns true (after recycling the buffer back to the decoder) if the
// stream is mid-flush; callers must then not queue the buffer.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// Re-syncs the written-frame counter with a newly configured (non-offload)
// AudioSink.
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    mAnchorNumFramesWritten = -1;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

// Handles kWhatPause: disables both drains (via generation bump) and pauses
// the sink; in offload mode arms the teardown timeout so the DSP can power
// off after kOffloadPauseMaxUs.
void NuPlayer::Renderer::onPause() {
    if (mPaused) {
        ALOGW("Renderer::onPause() called while already paused!");
        return;
    }
    {
        Mutex::Autolock autoLock(mLock);
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
        mPaused = true;
        setPauseStartedTimeRealUs(ALooper::GetNowUs());
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
        startAudioOffloadPauseTimeout();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());
}

// Handles kWhatResume: restarts the sink, shifts the real-time anchor
// forward by the pause duration and re-posts both drains.
void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;
    if (mPauseStartedTimeRealUs != -1) {
        int64_t newAnchorRealUs =
            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
        setAnchorTime(
                mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
    }

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
    if (mVideoScheduler == NULL) {
        mVideoScheduler = new VideoFrameScheduler();
    }
    mVideoScheduler->init(fps);
}

// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query
// the audio driver.
//
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
// Returns the duration of audio (in us) the sink has actually played out as
// of |nowUs|, extrapolated from the sink's frame position and the age of
// that position report.
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        // Convert the timestamp's wall-clock time to microseconds.
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        // No frames reported yet; treat as zero progress as of now.
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //      numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        // getPosition() reports the server-side position; pad with half the
        // sink latency as a rough estimate of when it was actually heard.
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
    // Frames -> duration, plus the time elapsed since the position was taken.
    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}

// Tears down the offloaded audio path (at most once, guarded by
// mAudioOffloadTornDown): stops and flushes the sink, then notifies the
// player with the last known position so playback can be restarted in
// non-offload mode.
void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
    if (mAudioOffloadTornDown) {
        return;
    }
    mAudioOffloadTornDown = true;

    int64_t currentPositionUs;
    if (getCurrentPosition(&currentPositionUs) != OK) {
        currentPositionUs = 0;
    }

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->setInt32("reason", reason);
    notify->post();
}

// Arms the pause timeout (offload mode only): after kOffloadPauseMaxUs the
// posted message lets the sink be closed so the DSP can power down. The
// generation stamp allows a later cancel to invalidate it.
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
        msg->post(kOffloadPauseMaxUs);
    }
}

// Invalidates any pending pause-timeout message by bumping the generation
// (the message itself cannot be unposted).
void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        ++mAudioOffloadPauseTimeoutGeneration;
    }
}

// (Re)opens the AudioSink for the given format. Tries compress-offload first
// when offloading is enabled (falling back to PCM on failure unless
// |offloadOnly| is set); otherwise opens a 16-bit PCM sink. Returns whether
// the sink ended up in offload mode.
bool NuPlayer::Renderer::onOpenAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
            offloadOnly, offloadingAudio());
    bool audioSinkChanged = false;

    int32_t numChannels;
    CHECK(format->findInt32("channel-count", &numChannels));

    int32_t channelMask;
    if (!format->findInt32("channel-mask", &channelMask)) {
        // signal to the AudioSink to derive the mask from count.
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    int32_t sampleRate;
    CHECK(format->findInt32("sample-rate", &sampleRate));

    if (offloadingAudio()) {
        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
        AString mime;
        CHECK(format->findString("mime", &mime));
        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());

        if (err != OK) {
            // Unmappable mime: give up on offload for this track.
            ALOGE("Couldn't map mime \"%s\" to a valid "
                    "audio_format", mime.c_str());
            onDisableOffloadAudio();
        } else {
            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                    mime.c_str(), audioFormat);

            int avgBitRate = -1;
            format->findInt32("bit-rate", &avgBitRate);

            int32_t aacProfile = -1;
            if (audioFormat == AUDIO_FORMAT_AAC
                    && format->findInt32("aac-profile", &aacProfile)) {
                // Redefine AAC format as per aac profile
                mapAACProfileToAudioFormat(
                        audioFormat,
                        aacProfile);
            }

            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.duration_us = -1;
            format->findInt64(
                    "durationUs", &offloadInfo.duration_us);
            offloadInfo.sample_rate = sampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = hasVideo;
            offloadInfo.is_streaming = true;

            // Byte-compare against the current config to avoid needlessly
            // closing and reopening an identically-configured offload sink.
            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                ALOGV("openAudioSink: no change in offload mode");
                // no change from previous configuration, everything ok.
                return offloadingAudio();
            }
            ALOGV("openAudioSink: try to open AudioSink in offload mode");
            uint32_t offloadFlags = flags;
            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
            audioSinkChanged = true;
            mAudioSink->close();
            err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    audioFormat,
                    8 /* bufferCount */,
                    &NuPlayer::Renderer::AudioSinkCallback,
                    this,
                    (audio_output_flags_t)offloadFlags,
                    &offloadInfo);

            if (err == OK) {
                // If the playback is offloaded to h/w, we pass
                // the HAL some metadata information.
                // We don't want to do this for PCM because it
                // will be going through the AudioFlinger mixer
                // before reaching the hardware.
                // TODO
                mCurrentOffloadInfo = offloadInfo;
                err = mAudioSink->start();
                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
            }
            if (err != OK) {
                // Clean up, fall back to non offload mode.
                mAudioSink->close();
                onDisableOffloadAudio();
                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                ALOGV("openAudioSink: offload failed");
            }
        }
    }
    if (!offloadOnly && !offloadingAudio()) {
        // PCM path: either offload was never requested, or it failed above
        // (onDisableOffloadAudio() cleared the flag).
        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
        uint32_t pcmFlags = flags;
        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
        audioSinkChanged = true;
        mAudioSink->close();
        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
        CHECK_EQ(mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    AUDIO_FORMAT_PCM_16_BIT,
                    8 /* bufferCount */,
                    NULL,
                    NULL,
                    (audio_output_flags_t)pcmFlags),
                 (status_t)OK);
        mAudioSink->start();
    }
    if (audioSinkChanged) {
        // Reset frames-written bookkeeping for the new sink instance.
        onAudioSinkChanged();
    }

    return offloadingAudio();
}

// Closes the sink and forgets the offload configuration so a subsequent
// open is never skipped by the no-change memcmp above.
void NuPlayer::Renderer::onCloseAudioSink() {
    mAudioSink->close();
    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
}

}  // namespace android