NuPlayerRenderer.cpp revision 8b67ce7210caa4f119a687e9b0946b339db08265
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <cutils/properties.h> 24 25#include <media/stagefright/foundation/ABuffer.h> 26#include <media/stagefright/foundation/ADebug.h> 27#include <media/stagefright/foundation/AMessage.h> 28#include <media/stagefright/foundation/AUtils.h> 29#include <media/stagefright/MediaErrors.h> 30#include <media/stagefright/MetaData.h> 31#include <media/stagefright/Utils.h> 32 33#include <VideoFrameScheduler.h> 34 35#include <inttypes.h> 36 37namespace android { 38 39// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink 40// is closed to allow the audio DSP to power down. 
// Maximum time to remain in the paused state while audio is offloaded before the
// offload pause timeout fires and the sink is torn down (see kWhatAudioOffloadPauseTimeout).
static const int64_t kOffloadPauseMaxUs = 60000000ll;

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// When true, video frames are posted a full two vsyncs early for frame-accurate
// AV sync; controlled by the persist.sys.media.avsync system property.
static bool sFrameAccurateAVsync = false;

// Reads the persist.sys.media.avsync property into sFrameAccurateAVsync.
// Accepts "1" or any casing of "true" as enabled.
static void readProperties() {
    char value[PROPERTY_VALUE_MAX];
    if (property_get("persist.sys.media.avsync", value, NULL)) {
        sFrameAccurateAVsync =
            !strcmp("1", value) || !strcasecmp("true", value);
    }
}

// Constructs the renderer around an AudioSink and a notification target.
// All anchor/pause timestamps start at -1 (meaning "unset"); queue generations
// start at 0 and are bumped to invalidate in-flight drain messages.
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mVideoLateByUs(0ll),
      mHasAudio(false),
      mHasVideo(false),
      mPauseStartedTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0) {
    readProperties();
}

// If audio was being offloaded, stop/flush/close the sink so the audio DSP
// can release the offloaded track.
NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

// Queues a decoded buffer for rendering. Asynchronous: marshals the buffer and
// its "consumed" reply message onto the renderer's looper thread.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Queues an end-of-stream marker on the audio or video queue. finalResult must
// be an actual error/EOS status, never OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Requests an asynchronous flush of one queue. The mFlushing* flag is set here
// (caller thread) and cleared in onFlush() so duplicate flush requests are
// coalesced and buffers queued in between are dropped (dropBufferWhileFlushing).
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets all time anchors after a discontinuity (e.g. seek). Called with the
// expectation that both queues are (about to be) empty.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    setAudioFirstAnchorTime(-1);
    setAnchorTime(-1, -1);
    setVideoLateByUs(0);
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Forwards the detected content frame rate to the VideoFrameScheduler.
void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
    msg->setFloat("frame-rate", fps);
    msg->setFloat("frame-rate", fps);
    msg->post();
}

// Returns the current playback position in media time, evaluated at "now".
status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
    return getCurrentPosition(mediaUs, ALooper::GetNowUs());
}

// Computes the media-time position at real time nowUs by extrapolating from the
// current anchor pair (mAnchorTimeMediaUs, mAnchorTimeRealUs).
// Returns NO_INIT until media is known and an anchor has been established.
// While paused, time elapsed since mPauseStartedTimeRealUs is subtracted so the
// reported position stays frozen. The result is clamped below by the first
// audio anchor (position never reports before the first audio sample) and at 0.
status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (!mHasAudio && !mHasVideo) {
        return NO_INIT;
    }

    if (mAnchorTimeMediaUs < 0) {
        return NO_INIT;
    }
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    if (mPauseStartedTimeRealUs != -1) {
        positionUs -= (nowUs - mPauseStartedTimeRealUs);
    }

    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
        positionUs = mAudioFirstAnchorTimeMediaUs;
    }

    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
    return OK;
}

// Records that an audio (audio == true) or video stream exists. Sticky: never
// cleared for the lifetime of the renderer.
void NuPlayer::Renderer::setHasMedia(bool audio) {
    Mutex::Autolock autoLock(mTimeLock);
    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }
}

void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mAudioFirstAnchorTimeMediaUs = mediaUs;
}

// Sets the first audio anchor only if it has not been set yet (-1 == unset).
void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
    }
}

// Updates the (media time, real time) anchor pair used by getCurrentPosition().
// When called for a resume, the pause start timestamp is cleared as well.
void NuPlayer::Renderer::setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume) {
    Mutex::Autolock autoLock(mTimeLock);
    mAnchorTimeMediaUs = mediaUs;
    mAnchorTimeRealUs = realUs;
    if (resume) {
        mPauseStartedTimeRealUs = -1;
    }
}

void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mTimeLock);
    return mVideoLateByUs;
}

void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mPauseStartedTimeRealUs = realUs;
}

// Opens the audio sink on the renderer's looper thread (synchronous round trip).
// Returns true if the sink was opened in offload mode.
bool NuPlayer::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
    msg->setMessage("format", format);
    msg->setInt32("offload-only", offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);

    int32_t offload;
    CHECK(response->findInt32("offload", &offload));
    return (offload != 0);
}

// Closes the audio sink on the renderer's looper thread (synchronous round trip).
void NuPlayer::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

// Central message dispatcher; runs on the renderer's ALooper thread.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

            sp<AMessage> response = new AMessage;
            response->setInt32("offload", offload);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            // Stale generation means the queue was flushed/paused since this
            // message was posted; ignore it.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                // NOTE(review): unsigned subtraction — if getPosition() ever
                // reported more frames than were written this would wrap;
                // presumably the sink guarantees played <= written. Verify.
                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            // Re-attempt scheduling after the "time jump" back-off taken in
            // postDrainVideoQueue().
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules a kWhatDrainAudioQueue message after delayUs unless one is already
// pending, the queues are being synced, playback is paused, audio is offloaded
// (offload uses the sink callback instead), or there is nothing to drain.
// The _l suffix indicates the caller holds mLock (in the non-offload path the
// lock is not actually required — see the comment in kWhatDrainAudioQueue).
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

// Arms the "media rendering started" notification: it fires once both
// generations still match when notifyIfMediaRenderingStarted() runs.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Sends kWhatMediaRenderingStart exactly once per prepareForMediaRenderingStart()
// (the -1 sentinel prevents repeats).
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
// AudioSink callback used in offload mode; invoked on the audio thread with the
// renderer instance passed back through `cookie`.
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

// Offload-mode fill: copies up to `size` bytes from the audio queue into the
// sink's buffer. Runs on the audio callback thread, hence mLock protects the
// audio queue against the looper thread. Returns the number of bytes copied;
// returning 0 while paused/non-offloaded lets the sink underrun silently.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Only the first, un-started entry updates the time anchor; partially
        // consumed entries already did so on a previous callback.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        // Stop the sink from the looper thread, not the audio callback thread.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Non-offload drain: writes queued audio into the AudioSink until the sink's
// buffer is full or the queue empties. Returns true if the queue still has data
// (so the caller should schedule another drain), false on EOS or empty queue.
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        // Track the ordinal of the most recently drained audio buffer; used by
        // postDrainVideoQueue()'s time-jump heuristic.
        mLastAudioBufferDrained = entry->mBufferOrdinal;

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            //  1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            //  2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size. If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    return !mAudioQueue.empty();
}

// How much written-but-not-yet-played audio remains in the sink, in microseconds.
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

// Maps a media timestamp to the real (wall-clock) time at which it should render,
// based on the current playback position.
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t currentPositionUs;
    if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
        // If failed to get current position, e.g. due to audio clock is not ready, then just
        // play out video immediately without delay.
        return nowUs;
    }
    return (mediaTimeUs - currentPositionUs) + nowUs;
}

// Re-anchors the clock on each new audio timestamp: the media time will equal
// mediaTimeUs once everything already written to the sink has played out.
void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
    int64_t nowUs = ALooper::GetNowUs();
    setAnchorTime(mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs));
}

// Schedules the next video frame for display: computes its target real time,
// runs it through the VideoFrameScheduler (vsync alignment), and posts
// kWhatDrainVideoQueue slightly ahead of the render deadline.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            // Video-only (or first-ever) frame: anchor the clock on it and
            // render immediately.
            setAnchorTime(mediaTimeUs, nowUs);
            realTimeUs = nowUs;
        } else {
            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
        }

        // Heuristics to handle situation when media time changed without a
        // discontinuity. If we have not drained an audio buffer that was
        // received after this buffer, repost in 10 msec. Otherwise repost
        // in 500 msec.
        delayUs = realTimeUs - nowUs;
        if (delayUs > 500000) {
            int64_t postDelayUs = 500000;
            if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
                postDelayUs = 10000;
            }
            msg->setWhat(kWhatPostDrainVideoQueue);
            msg->post(postDelayUs);
            mVideoScheduler->restart();
            ALOGI("possible video time jump of %dms, retrying in %dms",
                    (int)(delayUs / 1000), (int)(postDelayUs / 1000));
            mDrainVideoQueuePending = true;
            return;
        }
    }

    // Snap the render time to the vsync grid (scheduler works in nanoseconds).
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    // FIXME currently this increases power consumption, so unless frame-accurate
    // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
    if (!sFrameAccurateAVsync) {
        twoVsyncsUs >>= 4;
    }
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}

// Releases the head video frame to the consumer (decoder/native window),
// deciding whether it is still worth rendering (dropped if >40ms late).
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        setVideoLateByUs(0);
        return;
    }

    int64_t nowUs = -1;
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        nowUs = ALooper::GetNowUs();
        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
    }

    bool tooLate = false;

    if (!mPaused) {
        if (nowUs == -1) {
            nowUs = ALooper::GetNowUs();
        }
        setVideoLateByUs(nowUs - realTimeUs);
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %lld us (%.2f secs)",
                 mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
        }
    } else {
        // While paused we still release the frame (so seeks show a preview) but
        // never count it as late.
        setVideoLateByUs(0);
        if (!mVideoSampleReceived && !mHasAudio) {
            // This will ensure that the first frame after a flush won't be used as anchor
            // when renderer is in paused state, because resume can happen any time after seek.
            setAnchorTime(-1, -1);
        }
    }

    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

// Notifies the owner of end-of-stream on one track, optionally delayed so the
// notification lands after pending audio has actually played out.
void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

// Called from the audio callback thread; bounces the tear-down onto the looper.
void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

// Looper-side handler for queueBuffer(): enqueues the entry and, if queue
// syncing is active, drops leading audio until audio/video start times align
// within 0.1s. mLock guards the audio queue (shared with the offload callback);
// the video queue is only touched on this thread.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    setHasMedia(audio);

    if (mHasVideo) {
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;
    entry.mBufferOrdinal = ++mTotalBuffersQueued;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

// Ends the queue-sync phase and kicks both drain pumps. Caller holds mLock.
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-side handler for queueEOS(): enqueues an entry with a NULL buffer
// carrying finalResult. An empty queue in sync mode ends syncing first, since
// no timestamp alignment is possible past EOS.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        // NOTE(review): unlike the audio branch, mSyncQueues is read here before
        // mLock is taken (the video queue itself is looper-thread-only, but
        // mSyncQueues is also written under mLock elsewhere) — confirm this
        // ordering is intentional.
        if (mVideoQueue.empty() && mSyncQueues) {
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper-side handler for flush(): clears the flushing flag, abandons queue
// syncing and anchors, empties the requested queue, bumps its generation to
// invalidate pending drain messages, and notifies completion.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
         setPauseStartedTimeRealUs(-1);
         setAnchorTime(-1, -1);
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                setAudioFirstAnchorTime(-1);
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            // pause/flush/start cycle drops queued offload data in the DSP.
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}

// Drains a queue, returning every real buffer to its producer as "consumed"
// (EOS entries have no buffer and are simply discarded).
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// While a flush is in flight for this track, incoming buffers are returned to
// the producer immediately instead of being queued. Returns true if dropped.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// Resynchronizes mNumFramesWritten with the (new) sink after a sink change.
// Only meaningful in non-offload mode; offload ignores the event.
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

// Leaves offload mode; bumping the audio generation discards any drain message
// posted under the old mode.
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

// Looper-side pause: freezes the position clock, cancels pending drains via
// generation bump, pauses the sink, and arms the offload pause timeout.
void NuPlayer::Renderer::onPause() {
    if (mPaused) {
        ALOGW("Renderer::onPause() called while already paused!");
        return;
    }
    {
        Mutex::Autolock autoLock(mLock);
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
        mPaused = true;
        setPauseStartedTimeRealUs(ALooper::GetNowUs());
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
        startAudioOffloadPauseTimeout();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());
}

// Looper-side resume: restarts the sink, shifts the real-time anchor forward by
// the paused duration so media time continues seamlessly, and restarts drains.
void NuPlayer::Renderer::onResume() {
    readProperties();

    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;
    if (mPauseStartedTimeRealUs != -1) {
        int64_t newAnchorRealUs =
            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
        setAnchorTime(mAnchorTimeMediaUs, newAnchorRealUs, true /* resume */);
    }

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-side handler for setVideoFrameRate(): (re)initializes the scheduler
// with the content frame rate.
void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
    if (mVideoScheduler == NULL) {
        mVideoScheduler = new VideoFrameScheduler();
    }
    mVideoScheduler->init(fps);
}

// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
1183// 1184// Some calls could conceivably retrieve extrapolated data instead of 1185// accessing getTimestamp() or getPosition() every time a data buffer with 1186// a media time is received. 1187// 1188int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { 1189 uint32_t numFramesPlayed; 1190 int64_t numFramesPlayedAt; 1191 AudioTimestamp ts; 1192 static const int64_t kStaleTimestamp100ms = 100000; 1193 1194 status_t res = mAudioSink->getTimestamp(ts); 1195 if (res == OK) { // case 1: mixing audio tracks and offloaded tracks. 1196 numFramesPlayed = ts.mPosition; 1197 numFramesPlayedAt = 1198 ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; 1199 const int64_t timestampAge = nowUs - numFramesPlayedAt; 1200 if (timestampAge > kStaleTimestamp100ms) { 1201 // This is an audio FIXME. 1202 // getTimestamp returns a timestamp which may come from audio mixing threads. 1203 // After pausing, the MixerThread may go idle, thus the mTime estimate may 1204 // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms, 1205 // the max latency should be about 25ms with an average around 12ms (to be verified). 1206 // For safety we use 100ms. 1207 ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)", 1208 (long long)nowUs, (long long)numFramesPlayedAt); 1209 numFramesPlayedAt = nowUs - kStaleTimestamp100ms; 1210 } 1211 //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt); 1212 } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track 1213 numFramesPlayed = 0; 1214 numFramesPlayedAt = nowUs; 1215 //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", 1216 // numFramesPlayed, (long long)numFramesPlayedAt); 1217 } else { // case 3: transitory at new track or audio fast tracks. 
1218 res = mAudioSink->getPosition(&numFramesPlayed); 1219 CHECK_EQ(res, (status_t)OK); 1220 numFramesPlayedAt = nowUs; 1221 numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */ 1222 //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); 1223 } 1224 1225 // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. 1226 //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test 1227 int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()) 1228 + nowUs - numFramesPlayedAt; 1229 if (durationUs < 0) { 1230 // Occurs when numFramesPlayed position is very small and the following: 1231 // (1) In case 1, the time nowUs is computed before getTimestamp() is called and 1232 // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed. 1233 // (2) In case 3, using getPosition and adding mAudioSink->latency() to 1234 // numFramesPlayedAt, by a time amount greater than numFramesPlayed. 1235 // 1236 // Both of these are transitory conditions. 
1237 ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs); 1238 durationUs = 0; 1239 } 1240 ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)", 1241 (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt); 1242 return durationUs; 1243} 1244 1245void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) { 1246 if (mAudioOffloadTornDown) { 1247 return; 1248 } 1249 mAudioOffloadTornDown = true; 1250 1251 int64_t currentPositionUs; 1252 if (getCurrentPosition(¤tPositionUs) != OK) { 1253 currentPositionUs = 0; 1254 } 1255 1256 mAudioSink->stop(); 1257 mAudioSink->flush(); 1258 1259 sp<AMessage> notify = mNotify->dup(); 1260 notify->setInt32("what", kWhatAudioOffloadTearDown); 1261 notify->setInt64("positionUs", currentPositionUs); 1262 notify->setInt32("reason", reason); 1263 notify->post(); 1264} 1265 1266void NuPlayer::Renderer::startAudioOffloadPauseTimeout() { 1267 if (offloadingAudio()) { 1268 sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id()); 1269 msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration); 1270 msg->post(kOffloadPauseMaxUs); 1271 } 1272} 1273 1274void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() { 1275 if (offloadingAudio()) { 1276 ++mAudioOffloadPauseTimeoutGeneration; 1277 } 1278} 1279 1280bool NuPlayer::Renderer::onOpenAudioSink( 1281 const sp<AMessage> &format, 1282 bool offloadOnly, 1283 bool hasVideo, 1284 uint32_t flags) { 1285 ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)", 1286 offloadOnly, offloadingAudio()); 1287 bool audioSinkChanged = false; 1288 1289 int32_t numChannels; 1290 CHECK(format->findInt32("channel-count", &numChannels)); 1291 1292 int32_t channelMask; 1293 if (!format->findInt32("channel-mask", &channelMask)) { 1294 // signal to the AudioSink to derive the mask from count. 
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    int32_t sampleRate;
    CHECK(format->findInt32("sample-rate", &sampleRate));

    if (offloadingAudio()) {
        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
        AString mime;
        CHECK(format->findString("mime", &mime));
        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());

        if (err != OK) {
            // Unknown mime: offload cannot work, disable it and fall through
            // to the PCM path below.
            ALOGE("Couldn't map mime \"%s\" to a valid "
                    "audio_format", mime.c_str());
            onDisableOffloadAudio();
        } else {
            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                    mime.c_str(), audioFormat);

            int avgBitRate = -1;
            format->findInt32("bit-rate", &avgBitRate);

            int32_t aacProfile = -1;
            if (audioFormat == AUDIO_FORMAT_AAC
                    && format->findInt32("aac-profile", &aacProfile)) {
                // Redefine AAC format as per aac profile
                mapAACProfileToAudioFormat(
                        audioFormat,
                        aacProfile);
            }

            // Describe the compressed stream to the audio HAL.
            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.duration_us = -1;
            format->findInt64(
                    "durationUs", &offloadInfo.duration_us);
            offloadInfo.sample_rate = sampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = hasVideo;
            offloadInfo.is_streaming = true;

            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                ALOGV("openAudioSink: no change in offload mode");
                // no change from previous configuration, everything ok.
                return offloadingAudio();
            }
            ALOGV("openAudioSink: try to open AudioSink in offload mode");
            flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
            flags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
            audioSinkChanged = true;
            mAudioSink->close();
            err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    audioFormat,
                    8 /* bufferCount */,
                    &NuPlayer::Renderer::AudioSinkCallback,
                    this,
                    (audio_output_flags_t)flags,
                    &offloadInfo);

            if (err == OK) {
                // If the playback is offloaded to h/w, we pass
                // the HAL some metadata information.
                // We don't want to do this for PCM because it
                // will be going through the AudioFlinger mixer
                // before reaching the hardware.
                // TODO
                mCurrentOffloadInfo = offloadInfo;
                err = mAudioSink->start();
                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
            }
            if (err != OK) {
                // Clean up, fall back to non offload mode.
                mAudioSink->close();
                onDisableOffloadAudio();
                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                ALOGV("openAudioSink: offload failed");
            }
        }
    }
    if (!offloadOnly && !offloadingAudio()) {
        // Offload was not requested or just failed: open a regular 16-bit
        // PCM sink (no callback, data is pushed by the drain loop).
        flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
        ALOGV("openAudioSink: open AudioSink in NON-offload mode");

        audioSinkChanged = true;
        mAudioSink->close();
        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
        CHECK_EQ(mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    AUDIO_FORMAT_PCM_16_BIT,
                    8 /* bufferCount */,
                    NULL,
                    NULL,
                    (audio_output_flags_t)flags),
                 (status_t)OK);
        mAudioSink->start();
    }
    if (audioSinkChanged) {
        // Re-sync frames-written bookkeeping against the fresh sink.
        onAudioSinkChanged();
    }

    return offloadingAudio();
}

// Closes the sink and forgets the cached offload configuration so the next
// onOpenAudioSink() reconfigures from scratch.
void NuPlayer::Renderer::onCloseAudioSink() {
    mAudioSink->close();
    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
}

}  // namespace android