// android_AudioSfDecoder.cpp revision 91540f92d7f1bcda423859af6bd82df083c2afab
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Verbose

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"

#include <media/stagefright/foundation/ADebug.h>


// Byte thresholds used by getCacheRemaining() to classify the cache fill level
// when the content duration (and therefore bitrate) is unknown.
#define SIZE_CACHED_HIGH_BYTES 1000000
#define SIZE_CACHED_MED_BYTES   700000
#define SIZE_CACHED_LOW_BYTES   400000

namespace android {

//--------------------------------------------------------------------------------------------------
// Constructor: initializes all decode-related state to "unknown / not started".
// The data and audio sources are created later, on the event loop, in onPrepare().
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mDataSource(0),
        mAudioSource(0),
        mAudioSourceStarted(false),
        mBitrate(-1),
        mChannelMask(UNKNOWN_CHANNELMASK),
        mDurationUsec(ANDROID_UNKNOWN_TIME),
        mDecodeBuffer(NULL),
        mSeekTimeMsec(0),
        mLastDecodedPositionUs(ANDROID_UNKNOWN_TIME)
{
    SL_LOGD("AudioSfDecoder::AudioSfDecoder()");
}


AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGD("AudioSfDecoder::~AudioSfDecoder()");
}


// Releases decode resources ahead of destruction: drops any decoded buffer that
// has not been rendered yet, and stops the audio source if it was started.
// Runs with mBufferSourceLock held so it cannot race with onDecode()/onRender().
void AudioSfDecoder::preDestroy() {
    GenericPlayer::preDestroy();
    SL_LOGD("AudioSfDecoder::preDestroy()");
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }

        if ((mAudioSource != 0) && mAudioSourceStarted) {
            mAudioSource->stop();
            mAudioSourceStarted = false;
        }
    }
}
71//-------------------------------------------------- 72void AudioSfDecoder::play() { 73 SL_LOGD("AudioSfDecoder::play"); 74 75 GenericPlayer::play(); 76 (new AMessage(kWhatDecode, id()))->post(); 77} 78 79 80void AudioSfDecoder::getPositionMsec(int* msec) { 81 int64_t timeUsec = getPositionUsec(); 82 if (timeUsec == ANDROID_UNKNOWN_TIME) { 83 *msec = ANDROID_UNKNOWN_TIME; 84 } else { 85 *msec = timeUsec / 1000; 86 } 87} 88 89 90void AudioSfDecoder::startPrefetch_async() { 91 SL_LOGV("AudioSfDecoder::startPrefetch_async()"); 92 93 if (wantPrefetch()) { 94 SL_LOGV("AudioSfDecoder::startPrefetch_async(): sending check cache msg"); 95 96 mStateFlags |= kFlagPreparing | kFlagBuffering; 97 98 (new AMessage(kWhatCheckCache, id()))->post(); 99 } 100} 101 102 103//-------------------------------------------------- 104uint32_t AudioSfDecoder::getPcmFormatKeyCount() const { 105 return NB_PCMMETADATA_KEYS; 106} 107 108 109//-------------------------------------------------- 110bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) { 111 if (index >= NB_PCMMETADATA_KEYS) { 112 return false; 113 } else { 114 *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) +1; 115 return true; 116 } 117} 118 119 120//-------------------------------------------------- 121bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) { 122 uint32_t actualKeySize; 123 if (!getPcmFormatKeySize(index, &actualKeySize)) { 124 return false; 125 } 126 if (keySize < actualKeySize) { 127 return false; 128 } 129 strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize); 130 return true; 131} 132 133 134//-------------------------------------------------- 135bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) { 136 if (index >= NB_PCMMETADATA_KEYS) { 137 *pValueSize = 0; 138 return false; 139 } else { 140 *pValueSize = sizeof(uint32_t); 141 return true; 142 } 143} 144 145 
146//-------------------------------------------------- 147bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) { 148 uint32_t valueSize = 0; 149 if (!getPcmFormatValueSize(index, &valueSize)) { 150 return false; 151 } else if (size != valueSize) { 152 // this ensures we are accessing mPcmFormatValues with a valid size for that index 153 SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d", 154 index, size, valueSize); 155 return false; 156 } else { 157 android::Mutex::Autolock autoLock(mPcmFormatLock); 158 *pValue = mPcmFormatValues[index]; 159 return true; 160 } 161} 162 163 164//-------------------------------------------------- 165// Event handlers 166// it is strictly verboten to call those methods outside of the event loop 167 168// Initializes the data and audio sources, and update the PCM format info 169// post-condition: upon successful initialization based on the player data locator 170// GenericPlayer::onPrepare() was called 171// mDataSource != 0 172// mAudioSource != 0 173// mAudioSourceStarted == true 174// All error returns from this method are via notifyPrepared(status) followed by "return". 
// Prepares the player on the event loop: builds the DataSource from the data
// locator, finds the first audio track, optionally wraps it in an OMX decoder,
// starts the source, and publishes the PCM format values. All failures report
// through notifyPrepared(status) and return early.
void AudioSfDecoder::onPrepare() {
    SL_LOGD("AudioSfDecoder::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        // Initialize the PCM format info with the known parameters before the start of the decode
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
        // initialization with the default values: they will be replaced by the actual values
        // once the decoder has figured them out
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
    }

    //---------------------------------
    // Instantiate and initialize the data source for the decoder
    sp<DataSource> dataSource;

    switch (mDataLocatorType) {

    case kDataLocatorNone:
        SL_LOGE("AudioSfDecoder::onPrepare: no data locator set");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;

    case kDataLocatorUri:
        dataSource = DataSource::CreateFromURI(mDataLocator.uriRef);
        if (dataSource == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef);
            notifyPrepared(MEDIA_ERROR_BASE);
            return;
        }
        break;

    case kDataLocatorFd:
    {
        // As FileSource unconditionally takes ownership of the fd and closes it, then
        // we have to make a dup for FileSource if the app wants to keep ownership itself
        int fd = mDataLocator.fdi.fd;
        if (mDataLocator.fdi.mCloseAfterUse) {
            mDataLocator.fdi.mCloseAfterUse = false;
        } else {
            fd = ::dup(fd);
        }
        dataSource = new FileSource(fd, mDataLocator.fdi.offset, mDataLocator.fdi.length);
        status_t err = dataSource->initCheck();
        if (err != OK) {
            notifyPrepared(err);
            return;
        }
        break;
    }

    default:
        TRESPASS();
    }

    //---------------------------------
    // Instantiate and initialize the decoder attached to the data source
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    if (extractor == NULL) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    // Find the first audio track; remember whether it is already raw PCM
    // (in which case no decoder is needed).
    ssize_t audioTrackIndex = -1;
    bool isRawAudio = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("audio/", mime, 6)) {
            audioTrackIndex = i;

            if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
                isRawAudio = true;
            }
            break;
        }
    }

    if (audioTrackIndex < 0) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MediaSource> source = extractor->getTrack(audioTrackIndex);
    sp<MetaData> meta = source->getFormat();

    // we can't trust the OMXCodec (if there is one) to issue a INFO_FORMAT_CHANGED so we want
    // to have some meaningful values as soon as possible.
    bool hasChannelCount = meta->findInt32(kKeyChannelCount, &mChannelCount);
    int32_t sr;
    bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr);
    if (hasSampleRate) {
        mSampleRateHz = (uint32_t) sr;
    }

    // Derive duration and average bitrate (bits/sec) when both the content
    // size and duration are known; otherwise leave them unknown.
    off64_t size;
    int64_t durationUs;
    if (dataSource->getSize(&size) == OK
            && meta->findInt64(kKeyDuration, &durationUs)) {
        if (durationUs != 0) {
            mBitrate = size * 8000000ll / durationUs; // in bits/sec
        } else {
            mBitrate = -1;
        }
        mDurationUsec = durationUs;
        mDurationMsec = durationUs / 1000;
    } else {
        mBitrate = -1;
        mDurationUsec = ANDROID_UNKNOWN_TIME;
        mDurationMsec = ANDROID_UNKNOWN_TIME;
    }

    // the audio content is not raw PCM, so we need a decoder
    if (!isRawAudio) {
        OMXClient client;
        CHECK_EQ(client.connect(), (status_t)OK);

        source = OMXCodec::Create(
                client.interface(), meta, false /* createEncoder */,
                source);

        if (source == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder.");
            notifyPrepared(ERROR_UNSUPPORTED);
            return;
        }

        meta = source->getFormat();
    }


    if (source->start() != OK) {
        SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    // The data source, and audio source (a decoder if required) are ready to be used
    mDataSource = dataSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    // If the container metadata lacked channel count / sample rate, the
    // decoder's output format must provide them now.
    if (!hasChannelCount) {
        CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
    }

    if (!hasSampleRate) {
        CHECK(meta->findInt32(kKeySampleRate, &sr));
        mSampleRateHz = (uint32_t) sr;
    }
    // FIXME add code below once channel mask support is in, currently initialized to default
    //    if (meta->findInt32(kKeyChannelMask, &mChannelMask)) {
    //        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
    //    }

    if (!wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch");
        // doesn't need prefetching, notify good to go
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;
        notifyStatus();
        notifyCacheFill();
    }

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
    }

    // at this point we have enough information about the source to create the sink that
    // will consume the data
    createAudioSink();

    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    GenericPlayer::onPrepare();
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


// Pauses playback: superclass state update, then pause the audio sink.
void AudioSfDecoder::onPause() {
    SL_LOGV("AudioSfDecoder::onPause()");
    GenericPlayer::onPause();
    pauseAudioSink();
}


// Resumes playback: superclass state update, then (re)start the audio sink.
void AudioSfDecoder::onPlay() {
    SL_LOGV("AudioSfDecoder::onPlay()");
    GenericPlayer::onPlay();
    startAudioSink();
}


// Records a pending seek request; the actual seek is performed by the next
// onDecode() via MediaSource::ReadOptions::setSeekTo().
void AudioSfDecoder::onSeek(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onSeek");
    int64_t timeMsec;
    CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec));

    Mutex::Autolock _l(mTimeLock);
    mStateFlags |= kFlagSeeking;
    mSeekTimeMsec = timeMsec;
    // position is unknown until a new buffer is decoded after the seek
    mLastDecodedPositionUs = ANDROID_UNKNOWN_TIME;
}


// Turns looping on or off by toggling kFlagLooping; end-of-stream handling in
// onDecode() restarts playback from 0 when the flag is set.
void AudioSfDecoder::onLoop(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onLoop");
    int32_t loop;
    CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop));

    if (loop) {
        //SL_LOGV("AudioSfDecoder::onLoop start looping");
        mStateFlags |= kFlagLooping;
    } else {
        //SL_LOGV("AudioSfDecoder::onLoop stop looping");
        mStateFlags &= ~kFlagLooping;
    }
}


// Polls the cache level. When buffering is done (EOS reached, cache high, or
// enough cached while preparing) it resumes playback/decoding; otherwise it
// re-posts itself 100ms later.
void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) {
    //SL_LOGV("AudioSfDecoder::onCheckCache");
    bool eos;
    CacheStatus_t status = getCacheRemaining(&eos);

    if (eos || status == kStatusHigh
            || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) {
        if (mStateFlags & kFlagPlaying) {
            startAudioSink();
        }
        mStateFlags &= ~kFlagBuffering;

        SL_LOGV("AudioSfDecoder::onCheckCache: buffering done.");

        if (mStateFlags & kFlagPreparing) {
            //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done.");
            mStateFlags &= ~kFlagPreparing;
        }

        if (mStateFlags & kFlagPlaying) {
            (new AMessage(kWhatDecode, id()))->post();
        }
        return;
    }

    // still buffering: check again in 100ms
    msg->post(100000);
}


// Decodes one buffer from the audio source, handling buffering pauses, pending
// seeks, end-of-stream/looping, and format-change notifications, then posts a
// render message on success.
void AudioSfDecoder::onDecode() {
    SL_LOGV("AudioSfDecoder::onDecode");

    //-------------------------------- Need to buffer some more before decoding?
    bool eos;
    if (mDataSource == 0) {
        // application set play state to paused which failed, then set play state to playing
        return;
    }

    if (wantPrefetch()
            && (getCacheRemaining(&eos) == kStatusLow)
            && !eos) {
        SL_LOGV("buffering more.");

        if (mStateFlags & kFlagPlaying) {
            pauseAudioSink();
        }
        mStateFlags |= kFlagBuffering;
        (new AMessage(kWhatCheckCache, id()))->post(100000);
        return;
    }

    if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) {
        // don't decode if we're not buffering, prefetching or playing
        //SL_LOGV("don't decode: not buffering, prefetching or playing");
        return;
    }

    //-------------------------------- Decode
    status_t err;
    MediaSource::ReadOptions readOptions;
    if (mStateFlags & kFlagSeeking) {
        assert(mSeekTimeMsec != ANDROID_UNKNOWN_TIME);
        readOptions.setSeekTo(mSeekTimeMsec * 1000);
    }

    int64_t timeUsec = ANDROID_UNKNOWN_TIME;
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            // the current decoded buffer hasn't been rendered, drop it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }
        if(!mAudioSourceStarted) {
            // source was stopped (e.g. by preDestroy) between messages
            return;
        }
        err = mAudioSource->read(&mDecodeBuffer, &readOptions);
        if (err == OK) {
            CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &timeUsec));
        }
    }

    {
        Mutex::Autolock _l(mTimeLock);
        if (mStateFlags & kFlagSeeking) {
            // the read consumed the seek request
            mStateFlags &= ~kFlagSeeking;
            mSeekTimeMsec = ANDROID_UNKNOWN_TIME;
        }
        if (timeUsec != ANDROID_UNKNOWN_TIME) {
            mLastDecodedPositionUs = timeUsec;
        }
    }

    //-------------------------------- Handle return of decode
    if (err != OK) {
        bool continueDecoding = false;
        switch(err) {
            case ERROR_END_OF_STREAM:
                // pin the reported position to the end of the content
                if (0 < mDurationUsec) {
                    Mutex::Autolock _l(mTimeLock);
                    mLastDecodedPositionUs = mDurationUsec;
                }
                // handle notification and looping at end of stream
                if (mStateFlags & kFlagPlaying) {
                    notify(PLAYEREVENT_ENDOFSTREAM, 1, true);
                }
                if (mStateFlags & kFlagLooping) {
                    seek(0);
                    // kick-off decoding again
                    continueDecoding = true;
                }
                break;
            case INFO_FORMAT_CHANGED:
                SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED");
                // reconfigure output
                {
                    Mutex::Autolock _l(mBufferSourceLock);
                    hasNewDecodeParams();
                }
                continueDecoding = true;
                break;
            case INFO_DISCONTINUITY:
                SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY");
                continueDecoding = true;
                break;
            default:
                SL_LOGE("MediaSource::read returned error %d", err);
                break;
        }
        if (continueDecoding) {
            if (NULL == mDecodeBuffer) {
                // no buffer to render: schedule the next decode directly
                (new AMessage(kWhatDecode, id()))->post();
                return;
            }
        } else {
            return;
        }
    }

    //-------------------------------- Render
    sp<AMessage> msg = new AMessage(kWhatRender, id());
    msg->post();
}


// Consumes the decoded buffer. NOTE: this base implementation only releases
// the buffer; subclasses/sinks are expected to do the actual rendering.
void AudioSfDecoder::onRender() {
    //SL_LOGV("AudioSfDecoder::onRender");

    Mutex::Autolock _l(mBufferSourceLock);

    if (NULL == mDecodeBuffer) {
        // nothing to render, move along
        SL_LOGV("AudioSfDecoder::onRender NULL buffer, exiting");
        return;
    }

    mDecodeBuffer->release();
    mDecodeBuffer = NULL;

}


// Event-loop dispatcher: routes messages to the handlers above, deferring
// anything unknown to GenericPlayer (which currently handles seek/loop).
void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatPrepare:
            onPrepare();
            break;

        case kWhatDecode:
            onDecode();
            break;

        case kWhatRender:
            onRender();
            break;

        case kWhatCheckCache:
            onCheckCache(msg);
            break;

        case kWhatNotif:
            onNotify(msg);
            break;

        case kWhatPlay:
            onPlay();
            break;

        case kWhatPause:
            onPause();
            break;

/*
        case kWhatSeek:
            onSeek(msg);
            break;

        case kWhatLoop:
            onLoop(msg);
            break;
*/
        default:
            GenericPlayer::onMessageReceived(msg);
            break;
    }
}

//--------------------------------------------------
// Prepared state, prefetch status notifications

// Marks the player prepared (successfully or not) exactly once and notifies
// the client with the specific prepare status code.
void AudioSfDecoder::notifyPrepared(status_t prepareRes) {
    assert(!(mStateFlags & (kFlagPrepared | kFlagPreparedUnsuccessfully)));
    if (NO_ERROR == prepareRes) {
        // The "then" fork is not currently used, but is kept here to make it easier
        // to replace by a new signalPrepareCompletion(status) if we re-visit this later.
        mStateFlags |= kFlagPrepared;
    } else {
        mStateFlags |= kFlagPreparedUnsuccessfully;
    }
    // Do not call the superclass onPrepare to notify, because it uses a default error
    // status code but we can provide a more specific one.
    // GenericPlayer::onPrepare();
    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true);
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


// Forwards prefetch/EOS events to the registered client callback; the callback
// pointer is copied under mNotifyClientLock so it can be invoked lock-free.
void AudioSfDecoder::onNotify(const sp<AMessage> &msg) {
    notif_cbf_t notifyClient;
    void* notifyUser;
    {
        android::Mutex::Autolock autoLock(mNotifyClientLock);
        if (NULL == mNotifyClient) {
            return;
        } else {
            notifyClient = mNotifyClient;
            notifyUser = mNotifyUser;
        }
    }
    int32_t val;
    if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val);
        notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val);
        notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val);
        notifyClient(kEventEndOfStream, val, 0, notifyUser);
    }
    else {
        GenericPlayer::onNotify(msg);
    }
}


//--------------------------------------------------
// Private utility functions

// Returns true when the data source reports it wants prefetching (e.g. a
// network-backed cached source); false when there is no data source yet.
bool AudioSfDecoder::wantPrefetch() {
    if (mDataSource != 0) {
        return (mDataSource->flags() & DataSource::kWantsPrefetching);
    } else {
        // happens if an improper data locator was passed, if the media extractor couldn't be
        // initialized, if there is no audio track in the media, if the OMX decoder couldn't be
        // instantiated, if the source couldn't be opened, or if the MediaSource
        // couldn't be started
        SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource");
        return false;
    }
}


// Current playback position in microseconds: during a seek this is the seek
// target; otherwise the timestamp of the last decoded buffer, or
// ANDROID_UNKNOWN_TIME when nothing has been decoded yet.
int64_t AudioSfDecoder::getPositionUsec() {
    Mutex::Autolock _l(mTimeLock);
    if (mStateFlags & kFlagSeeking) {
        return mSeekTimeMsec * 1000;
    } else {
        if (mLastDecodedPositionUs < 0) {
            return ANDROID_UNKNOWN_TIME;
        } else {
            return mLastDecodedPositionUs;
        }
    }
}


// Classifies how much data remains cached (low/intermediate/enough/high),
// updates mCacheFill (per mille), and emits status/fill notifications when
// they cross thresholds. *eos is set when the cache extends to end of stream.
// Precondition (CHECK'd): mBitrate >= 0, i.e. the duration-based path below
// is only reached when the bitrate could be derived in onPrepare().
CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) {
    sp<NuCachedSource2> cachedSource =
            static_cast<NuCachedSource2 *>(mDataSource.get());

    CacheStatus_t oldStatus = mCacheStatus;

    status_t finalStatus;
    size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus);
    *eos = (finalStatus != OK);

    CHECK_GE(mBitrate, 0);

    int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate;
    //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)",
    //       dataRemainingUs / 1E6, *eos);

    if (*eos) {
        // data is buffered up to the end of the stream, it can't get any better than this
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;

    } else {
        if (mDurationUsec > 0) {
            // known duration:

            // fill level is ratio of how much has been played + how much is
            // cached, divided by total duration
            // NOTE(review): getPositionUsec() returns int64_t; storing it in a
            // uint32_t truncates, and comparing the truncated value against the
            // (negative) ANDROID_UNKNOWN_TIME sentinel may never match --
            // confirm the intended type here.
            uint32_t currentPositionUsec = getPositionUsec();
            if (currentPositionUsec == ANDROID_UNKNOWN_TIME) {
                // if we don't know where we are, assume the worst for the fill ratio
                currentPositionUsec = 0;
            }
            if (mDurationUsec > 0) {
                mCacheFill = (int16_t) ((1000.0
                        * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec));
            } else {
                mCacheFill = 0;
            }
            //SL_LOGV("cacheFill = %d", mCacheFill);

            // cache status is evaluated against duration thresholds
            if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) {
                mCacheStatus = kStatusHigh;
                //LOGV("high");
            } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) {
                //LOGV("enough");
                mCacheStatus = kStatusEnough;
            } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) {
                //LOGV("low");
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }

        } else {
            // unknown duration:

            // cache status is evaluated against cache amount thresholds
            // (no duration so we don't have the bitrate either, could be derived from format?)
            if (dataRemaining > SIZE_CACHED_HIGH_BYTES) {
                mCacheStatus = kStatusHigh;
            } else if (dataRemaining > SIZE_CACHED_MED_BYTES) {
                mCacheStatus = kStatusEnough;
            } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) {
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }
        }

    }

    if (oldStatus != mCacheStatus) {
        notifyStatus();
    }

    // only notify fill-level changes larger than the configured threshold
    if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) {
        notifyCacheFill();
    }

    return mCacheStatus;
}


// Refreshes channel count / sample rate from the decoder's output format after
// an INFO_FORMAT_CHANGED, updates the published PCM metadata, and reconfigures
// the audio sink. Caller holds mBufferSourceLock (see onDecode()).
void AudioSfDecoder::hasNewDecodeParams() {

    if ((mAudioSource != 0) && mAudioSourceStarted) {
        sp<MetaData> meta = mAudioSource->getFormat();

        SL_LOGV("old sample rate = %d, channel count = %d", mSampleRateHz, mChannelCount);

        CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
        int32_t sr;
        CHECK(meta->findInt32(kKeySampleRate, &sr));
        mSampleRateHz = (uint32_t) sr;
        SL_LOGV("format changed: new sample rate = %d, channel count = %d",
                mSampleRateHz, mChannelCount);

        {
            android::Mutex::Autolock autoLock(mPcmFormatLock);
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz;
        }
    }

    // alert users of those params
    updateAudioSink();
}

} // namespace android