NuPlayerRenderer.cpp revision f06060f9544c71ebdc1e0b1b8d73f6cb275e6311
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <cutils/properties.h> 24 25#include <media/stagefright/foundation/ABuffer.h> 26#include <media/stagefright/foundation/ADebug.h> 27#include <media/stagefright/foundation/AMessage.h> 28#include <media/stagefright/foundation/AUtils.h> 29#include <media/stagefright/MediaErrors.h> 30#include <media/stagefright/MetaData.h> 31#include <media/stagefright/Utils.h> 32 33#include <VideoFrameScheduler.h> 34 35#include <inttypes.h> 36 37namespace android { 38 39// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink 40// is closed to allow the audio DSP to power down. 
static const int64_t kOffloadPauseMaxUs = 60000000ll;

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

// Cached value of the persist.sys.media.avsync property; when true, video
// frames are posted closer to their exact vsync-aligned render time.
static bool sFrameAccurateAVsync = false;

// Reads persist.sys.media.avsync ("1" or "true" enables frame-accurate A/V sync).
static void readProperties() {
    char value[PROPERTY_VALUE_MAX];
    if (property_get("persist.sys.media.avsync", value, NULL)) {
        sFrameAccurateAVsync =
            !strcmp("1", value) || !strcasecmp("true", value);
    }
}

// All timing/queue state starts out reset; the av-sync property is sampled here
// and again on resume().
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mVideoLateByUs(0ll),
      mHasAudio(false),
      mHasVideo(false),
      mPauseStartedTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0) {
    readProperties();
}

// If we were offloading, shut the AudioSink down cleanly (stop, flush, close).
NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

// Hands an audio or video buffer to the renderer's looper thread; notifyConsumed
// is posted back when the buffer has been rendered or dropped.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Queues an end-of-stream marker for one stream; finalResult must be an error
// code (ERROR_END_OF_STREAM for a normal EOS), never OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Starts flushing one stream. The mFlushingAudio/mFlushingVideo flag is set
// synchronously (under mFlushLock) so buffers queued from now on are dropped;
// the actual queue teardown happens on the looper in onFlush().
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            if (mFlushingAudio) {
                return;  // flush already in progress; don't post twice
            }
            mFlushingAudio = true;
        } else {
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets all anchor/lateness state after a discontinuity (e.g. seek).
// NOTE: called synchronously from the player thread, hence mLock.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    setAudioFirstAnchorTime(-1);
    setAnchorTime(-1, -1);
    setVideoLateByUs(0);
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::signalEnableOffloadAudio() {
    (new AMessage(kWhatEnableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
    msg->setFloat("frame-rate", fps);
    msg->post();
}

status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
    return getCurrentPosition(mediaUs, ALooper::GetNowUs());
}

// Extrapolates the current media position from the anchor (mediaUs, realUs)
// pair, compensating for time spent paused. Returns NO_INIT until media has
// been queued and an anchor established.
status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (!mHasAudio && !mHasVideo) {
        return
NO_INIT;
    }

    if (mAnchorTimeMediaUs < 0) {
        return NO_INIT;
    }
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    if (mPauseStartedTimeRealUs != -1) {
        // Don't let the position advance while paused.
        positionUs -= (nowUs - mPauseStartedTimeRealUs);
    }

    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
        // Clamp to the first audio timestamp so position never runs backwards
        // past the start of audio.
        positionUs = mAudioFirstAnchorTimeMediaUs;
    }

    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
    return OK;
}

// Records that at least one buffer of the given stream type has been queued.
void NuPlayer::Renderer::setHasMedia(bool audio) {
    Mutex::Autolock autoLock(mTimeLock);
    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }
}

void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mAudioFirstAnchorTimeMediaUs = mediaUs;
}

// Sets the first audio anchor only if it hasn't been set yet (-1 == unset).
void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
    }
}

// Establishes the (media time, real time) anchor used by getCurrentPosition().
// With resume == true the pause marker is cleared atomically with the anchor.
void NuPlayer::Renderer::setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume) {
    Mutex::Autolock autoLock(mTimeLock);
    mAnchorTimeMediaUs = mediaUs;
    mAnchorTimeRealUs = realUs;
    if (resume) {
        mPauseStartedTimeRealUs = -1;
    }
}

void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mTimeLock);
    return mVideoLateByUs;
}

void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mPauseStartedTimeRealUs = realUs;
}

// Synchronously opens the AudioSink on the looper thread (blocks via
// postAndAwaitResponse). Returns whether the sink was opened in offload mode.
bool NuPlayer::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
    msg->setMessage("format", format);
    msg->setInt32("offload-only",
offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);

    int32_t offload;
    CHECK(response->findInt32("offload", &offload));
    return (offload != 0);
}

// Synchronously closes the AudioSink on the looper thread.
void NuPlayer::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

// Looper-thread dispatch for all renderer messages.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

            sp<AMessage> response = new AMessage;
            response->setInt32("offload", offload);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            // Stale generation means the queue was flushed/paused since this
            // message was posted; ignore it.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is
how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            // Deferred re-scheduling (used when a large media-time jump was
            // detected); just try scheduling the queue again.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatEnableOffloadAudio:
        {
            onEnableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;  // a resume happened before the timeout fired
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules an audio-queue drain after delayUs, unless one is already pending
// or draining is inhibited (syncing queues, paused, or offload mode where the
// AudioSink callback pulls data instead). "_l" suffix: caller holds mLock in
// offload paths; see comment in kWhatDrainAudioQueue for the non-offload case.
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

// Arms notifyIfMediaRenderingStarted(): the notification fires once both
// generations are unchanged when the first buffers of each stream render.
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

// Sends kWhatMediaRenderingStart exactly once after (re)arming; the -1 stores
// disarm it until prepareForMediaRenderingStart() runs again.
void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
            mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// AudioSink callback used in offload mode; invoked on the audio thread, which
// is why fillAudioBuffer() below takes mLock.
// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

// Copies queued audio data into the sink-provided buffer (offload mode only).
// Runs on the AudioSink callback thread; returns the number of bytes copied.
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        // Only the first whole entry per callback updates the media-time anchor.
        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Entry fully consumed: release it back to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

    if (hasEOS) {
        // Stop the sink from the looper thread, not the callback thread.
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

// Writes as much queued audio as the sink can accept (non-offload mode).
// Returns true if the queue still has entries (caller reposts a drain).
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    // NOTE(review): if the sink reports more frames played than we believe we
    // wrote, this difference could go negative and then wrap when converted to
    // size_t below — presumably getPosition() never exceeds frames written;
    // confirm against AudioSink guarantees.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
              mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        // Remember the ordinal of the newest audio buffer we've touched; the
        // video path compares against this to detect media-time jumps.
        mLastAudioBufferDrained = entry->mBufferOrdinal;

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                // Delay the EOS notification until queued data has played out.
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write is fatal here.
            LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the number of bytes are delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size. If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
    return !mAudioQueue.empty();
}

// Duration of audio written to the sink that has not yet been played out:
// total written duration minus the duration already heard.
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

// Maps a media timestamp to the real (system) time at which it should render,
// based on the current extrapolated playback position.
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t currentPositionUs;
    if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
        // If failed to get current position, e.g. due to audio clock is not ready, then just
        // play out video immediately without delay.
        return nowUs;
    }
    return (mediaTimeUs - currentPositionUs) + nowUs;
}

// Re-anchors the clock on each new audio timestamp, projecting the anchor's
// real time forward by the audio still pending in the sink.
void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
    int64_t nowUs = ALooper::GetNowUs();
    setAnchorTime(mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs));
}

// Schedules the next video frame for display via the VideoFrameScheduler,
// posting kWhatDrainVideoQueue shortly before the frame's render time.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        // While paused we still let the *first* frame through (preroll);
        // after that, draining stops until resume.
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        // Timestamps are already in real time; no anchor mapping needed.
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            // No anchor yet (e.g. video-only start): anchor on this frame.
            setAnchorTime(mediaTimeUs, nowUs);
            realTimeUs = nowUs;
        } else {
            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
        }

        // Heuristics to handle situation when media time changed without a
        // discontinuity. If we have not drained an audio buffer that was
        // received after this buffer, repost in 10 msec. Otherwise repost
        // in 500 msec.
        delayUs = realTimeUs - nowUs;
        if (delayUs > 500000) {
            int64_t postDelayUs = 500000;
            if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
                // Audio hasn't caught up to this buffer yet; re-check soon.
                postDelayUs = 10000;
            }
            msg->setWhat(kWhatPostDrainVideoQueue);
            msg->post(postDelayUs);
            mVideoScheduler->restart();
            ALOGI("possible video time jump of %dms, retrying in %dms",
                    (int)(delayUs / 1000), (int)(postDelayUs / 1000));
            mDrainVideoQueuePending = true;
            return;
        }
    }

    // Snap the render time onto the display's vsync grid.
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    // FIXME currently this increases power consumption, so unless frame-accurate
    // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
    if (!sFrameAccurateAVsync) {
        twoVsyncsUs >>= 4;
    }
    msg->post(delayUs > twoVsyncsUs ?
delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}

// Releases the head video frame to the consumer (decoder/Surface), marking it
// "render" or "drop" depending on how late it is.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        setVideoLateByUs(0);
        return;
    }

    int64_t nowUs = -1;
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        nowUs = ALooper::GetNowUs();
        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
    }

    bool tooLate = false;

    if (!mPaused) {
        if (nowUs == -1) {
            nowUs = ALooper::GetNowUs();
        }
        setVideoLateByUs(nowUs - realTimeUs);
        // Frames more than 40ms late are dropped rather than rendered.
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %lld us (%.2f secs)",
                 mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
        }
    } else {
        setVideoLateByUs(0);
        if (!mVideoSampleReceived && !mHasAudio) {
            // This will ensure that the first frame after a flush won't be used as anchor
            // when renderer is in paused state, because resume can happen any time after seek.
            setAnchorTime(-1, -1);
        }
    }

    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

// Notifies the player of end-of-stream on one stream; delayUs lets audio EOS
// wait until buffered data has actually played out.
void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

// Looper-thread handler for kWhatQueueBuffer: enqueues the buffer and, while
// mSyncQueues is set, drops leading audio until the two queues' start times align.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    setHasMedia(audio);

    if (mHasVideo) {
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;
    entry.mBufferOrdinal = ++mTotalBuffersQueued;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

// Ends start-of-stream queue syncing and kicks both drains. Caller holds mLock.
void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-thread handler for kWhatQueueEOS: queues a sentinel entry (NULL
// buffer) carrying the final status for the stream.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        Mutex::Autolock autoLock(mLock);
        if (mAudioQueue.empty() && mSyncQueues) {
            // EOS before any audio arrived: stop waiting for queue alignment.
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if
(mVideoQueue.empty() && mSyncQueues) {
            // NOTE(review): unlike the audio branch, mVideoQueue/mSyncQueues
            // are read here before mLock is taken — presumably safe because
            // both are only mutated on this looper thread; confirm.
            Mutex::Autolock autoLock(mLock);
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper-thread handler for kWhatFlush: drops all queued buffers for one
// stream, bumps its generation (invalidating in-flight drain messages) and
// resets the timing anchor.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
        } else {
            mFlushingVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
        Mutex::Autolock autoLock(mLock);
        syncQueuesDone_l();
        setPauseStartedTimeRealUs(-1);
        setAnchorTime(-1, -1);
    }

    ALOGV("flushing %s", audio ?
"audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                setAudioFirstAnchorTime(-1);
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            // Pause/flush/restart the sink so stale compressed data is discarded.
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;
    notifyFlushComplete(audio);
}

// Empties a queue, returning every real buffer to its producer via
// notifyConsumed (EOS sentinels have no buffer and are simply dropped).
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Returns true (and releases the buffer back to its producer) when the stream
// is mid-flush, so queued messages racing with flush() are discarded.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// Re-bases mNumFramesWritten when the AudioSink instance changes
// (non-offload only; the offload path manages the sink itself).
void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

// Switches off offload mode; bumping the generation invalidates any drain
// messages posted under the old mode.
void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

void NuPlayer::Renderer::onEnableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags |= FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

// Looper-thread pause: invalidates pending drains, records the pause start
// time for position extrapolation, pauses the sink and (if offloading) arms
// the teardown timeout.
void NuPlayer::Renderer::onPause() {
    if (mPaused) {
        ALOGW("Renderer::onPause() called while already paused!");
        return;
    }
    {
        Mutex::Autolock autoLock(mLock);
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
        mPaused = true;
        setPauseStartedTimeRealUs(ALooper::GetNowUs());
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
        startAudioOffloadPauseTimeout();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());
}

// Looper-thread resume: restarts the sink, shifts the real-time anchor forward
// by the paused duration, and re-arms both queue drains.
void NuPlayer::Renderer::onResume() {
    readProperties();

    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;
    if (mPauseStartedTimeRealUs != -1) {
        int64_t newAnchorRealUs =
            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
        setAnchorTime(mAnchorTimeMediaUs, newAnchorRealUs, true /* resume */);
    }

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
    if (mVideoScheduler == NULL) {
        mVideoScheduler = new VideoFrameScheduler();
    }
mVideoScheduler->init(fps); 1195} 1196 1197// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs() 1198// as it acquires locks and may query the audio driver. 1199// 1200// Some calls could conceivably retrieve extrapolated data instead of 1201// accessing getTimestamp() or getPosition() every time a data buffer with 1202// a media time is received. 1203// 1204int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { 1205 uint32_t numFramesPlayed; 1206 int64_t numFramesPlayedAt; 1207 AudioTimestamp ts; 1208 static const int64_t kStaleTimestamp100ms = 100000; 1209 1210 status_t res = mAudioSink->getTimestamp(ts); 1211 if (res == OK) { // case 1: mixing audio tracks and offloaded tracks. 1212 numFramesPlayed = ts.mPosition; 1213 numFramesPlayedAt = 1214 ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; 1215 const int64_t timestampAge = nowUs - numFramesPlayedAt; 1216 if (timestampAge > kStaleTimestamp100ms) { 1217 // This is an audio FIXME. 1218 // getTimestamp returns a timestamp which may come from audio mixing threads. 1219 // After pausing, the MixerThread may go idle, thus the mTime estimate may 1220 // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms, 1221 // the max latency should be about 25ms with an average around 12ms (to be verified). 1222 // For safety we use 100ms. 1223 ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)", 1224 (long long)nowUs, (long long)numFramesPlayedAt); 1225 numFramesPlayedAt = nowUs - kStaleTimestamp100ms; 1226 } 1227 //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt); 1228 } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track 1229 numFramesPlayed = 0; 1230 numFramesPlayedAt = nowUs; 1231 //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", 1232 // numFramesPlayed, (long long)numFramesPlayedAt); 1233 } else { // case 3: transitory at new track or audio fast tracks. 
1234 res = mAudioSink->getPosition(&numFramesPlayed); 1235 CHECK_EQ(res, (status_t)OK); 1236 numFramesPlayedAt = nowUs; 1237 numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */ 1238 //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); 1239 } 1240 1241 // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. 1242 //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test 1243 int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()) 1244 + nowUs - numFramesPlayedAt; 1245 if (durationUs < 0) { 1246 // Occurs when numFramesPlayed position is very small and the following: 1247 // (1) In case 1, the time nowUs is computed before getTimestamp() is called and 1248 // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed. 1249 // (2) In case 3, using getPosition and adding mAudioSink->latency() to 1250 // numFramesPlayedAt, by a time amount greater than numFramesPlayed. 1251 // 1252 // Both of these are transitory conditions. 
1253 ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs); 1254 durationUs = 0; 1255 } 1256 ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)", 1257 (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt); 1258 return durationUs; 1259} 1260 1261void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) { 1262 if (mAudioOffloadTornDown) { 1263 return; 1264 } 1265 mAudioOffloadTornDown = true; 1266 1267 int64_t currentPositionUs; 1268 if (getCurrentPosition(¤tPositionUs) != OK) { 1269 currentPositionUs = 0; 1270 } 1271 1272 mAudioSink->stop(); 1273 mAudioSink->flush(); 1274 1275 sp<AMessage> notify = mNotify->dup(); 1276 notify->setInt32("what", kWhatAudioOffloadTearDown); 1277 notify->setInt64("positionUs", currentPositionUs); 1278 notify->setInt32("reason", reason); 1279 notify->post(); 1280} 1281 1282void NuPlayer::Renderer::startAudioOffloadPauseTimeout() { 1283 if (offloadingAudio()) { 1284 sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id()); 1285 msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration); 1286 msg->post(kOffloadPauseMaxUs); 1287 } 1288} 1289 1290void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() { 1291 if (offloadingAudio()) { 1292 ++mAudioOffloadPauseTimeoutGeneration; 1293 } 1294} 1295 1296bool NuPlayer::Renderer::onOpenAudioSink( 1297 const sp<AMessage> &format, 1298 bool offloadOnly, 1299 bool hasVideo, 1300 uint32_t flags) { 1301 ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)", 1302 offloadOnly, offloadingAudio()); 1303 bool audioSinkChanged = false; 1304 1305 int32_t numChannels; 1306 CHECK(format->findInt32("channel-count", &numChannels)); 1307 1308 int32_t channelMask; 1309 if (!format->findInt32("channel-mask", &channelMask)) { 1310 // signal to the AudioSink to derive the mask from count. 
1311 channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER; 1312 } 1313 1314 int32_t sampleRate; 1315 CHECK(format->findInt32("sample-rate", &sampleRate)); 1316 1317 if (offloadingAudio()) { 1318 audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT; 1319 AString mime; 1320 CHECK(format->findString("mime", &mime)); 1321 status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str()); 1322 1323 if (err != OK) { 1324 ALOGE("Couldn't map mime \"%s\" to a valid " 1325 "audio_format", mime.c_str()); 1326 onDisableOffloadAudio(); 1327 } else { 1328 ALOGV("Mime \"%s\" mapped to audio_format 0x%x", 1329 mime.c_str(), audioFormat); 1330 1331 int avgBitRate = -1; 1332 format->findInt32("bit-rate", &avgBitRate); 1333 1334 int32_t aacProfile = -1; 1335 if (audioFormat == AUDIO_FORMAT_AAC 1336 && format->findInt32("aac-profile", &aacProfile)) { 1337 // Redefine AAC format as per aac profile 1338 mapAACProfileToAudioFormat( 1339 audioFormat, 1340 aacProfile); 1341 } 1342 1343 audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER; 1344 offloadInfo.duration_us = -1; 1345 format->findInt64( 1346 "durationUs", &offloadInfo.duration_us); 1347 offloadInfo.sample_rate = sampleRate; 1348 offloadInfo.channel_mask = channelMask; 1349 offloadInfo.format = audioFormat; 1350 offloadInfo.stream_type = AUDIO_STREAM_MUSIC; 1351 offloadInfo.bit_rate = avgBitRate; 1352 offloadInfo.has_video = hasVideo; 1353 offloadInfo.is_streaming = true; 1354 1355 if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) { 1356 ALOGV("openAudioSink: no change in offload mode"); 1357 // no change from previous configuration, everything ok. 
1358 return offloadingAudio(); 1359 } 1360 ALOGV("openAudioSink: try to open AudioSink in offload mode"); 1361 flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; 1362 flags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER; 1363 audioSinkChanged = true; 1364 mAudioSink->close(); 1365 err = mAudioSink->open( 1366 sampleRate, 1367 numChannels, 1368 (audio_channel_mask_t)channelMask, 1369 audioFormat, 1370 8 /* bufferCount */, 1371 &NuPlayer::Renderer::AudioSinkCallback, 1372 this, 1373 (audio_output_flags_t)flags, 1374 &offloadInfo); 1375 1376 if (err == OK) { 1377 // If the playback is offloaded to h/w, we pass 1378 // the HAL some metadata information. 1379 // We don't want to do this for PCM because it 1380 // will be going through the AudioFlinger mixer 1381 // before reaching the hardware. 1382 // TODO 1383 mCurrentOffloadInfo = offloadInfo; 1384 err = mAudioSink->start(); 1385 ALOGV_IF(err == OK, "openAudioSink: offload succeeded"); 1386 } 1387 if (err != OK) { 1388 // Clean up, fall back to non offload mode. 
1389 mAudioSink->close(); 1390 onDisableOffloadAudio(); 1391 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER; 1392 ALOGV("openAudioSink: offload failed"); 1393 } 1394 } 1395 } 1396 if (!offloadOnly && !offloadingAudio()) { 1397 flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; 1398 ALOGV("openAudioSink: open AudioSink in NON-offload mode"); 1399 1400 audioSinkChanged = true; 1401 mAudioSink->close(); 1402 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER; 1403 CHECK_EQ(mAudioSink->open( 1404 sampleRate, 1405 numChannels, 1406 (audio_channel_mask_t)channelMask, 1407 AUDIO_FORMAT_PCM_16_BIT, 1408 8 /* bufferCount */, 1409 NULL, 1410 NULL, 1411 (audio_output_flags_t)flags), 1412 (status_t)OK); 1413 mAudioSink->start(); 1414 } 1415 if (audioSinkChanged) { 1416 onAudioSinkChanged(); 1417 } 1418 if (offloadingAudio()) { 1419 mAudioOffloadTornDown = false; 1420 } 1421 1422 return offloadingAudio(); 1423} 1424 1425void NuPlayer::Renderer::onCloseAudioSink() { 1426 mAudioSink->close(); 1427 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER; 1428} 1429 1430} // namespace android 1431 1432