android_AudioSfDecoder.cpp revision 54cad4f35a090a06e655fcc9e072e1d38f9e7689
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Debug

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"

#include <media/stagefright/foundation/ADebug.h>


// Cache-fill thresholds in bytes, used by getCacheRemaining() when the content
// duration (and therefore the bitrate) is unknown.
#define SIZE_CACHED_HIGH_BYTES 1000000
#define SIZE_CACHED_MED_BYTES   700000
#define SIZE_CACHED_LOW_BYTES   400000

namespace android {

// keep in sync with the entries of kPcmDecodeMetadataKeys[] defined in android_AudioSfDecoder.h
#define ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS   0
#define ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC 1
#define ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE 2
#define ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE 3
#define ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK   4
#define ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS    5

//--------------------------------------------------------------------------------------------------
// Constructor: all member state starts out "unknown"/empty; the real values are discovered
// in onPrepare() (and possibly revised by hasNewDecodeParams() after an INFO_FORMAT_CHANGED).
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mDataSource(0),
        mAudioSource(0),
        mAudioSourceStarted(false),
        mBitrate(-1),                       // -1 == unknown, see getCacheRemaining()
        mChannelMask(UNKNOWN_CHANNELMASK),
        mDurationUsec(-1),                  // -1 == unknown duration
        mDecodeBuffer(NULL),                // owned decoded MediaBuffer, guarded by mBufferSourceLock
        mTimeDelta(-1),
        mSeekTimeMsec(0),
        mLastDecodedPositionUs(-1),
        mPcmFormatKeyCount(0)               // stays 0 until onPrepare() fills mPcmFormatValues
{
    SL_LOGD("AudioSfDecoder::AudioSfDecoder()");
}


AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGD("AudioSfDecoder::~AudioSfDecoder()");
}


// Called before destruction: releases any decoded buffer still held and stops the
// audio source so the decode loop can no longer touch it.
void AudioSfDecoder::preDestroy() {
    GenericPlayer::preDestroy();
    SL_LOGD("AudioSfDecoder::preDestroy()");
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }

        if ((mAudioSource != 0) && mAudioSourceStarted) {
            mAudioSource->stop();
            mAudioSourceStarted = false;
        }
    }
}


//--------------------------------------------------
// Starts playback and kicks off the decode loop on the event-loop thread.
void AudioSfDecoder::play() {
    SL_LOGD("AudioSfDecoder::play");

    GenericPlayer::play();
    (new AMessage(kWhatDecode, id()))->post();
}


// Asynchronously starts prefetching (buffering) if the data source wants it;
// a no-op for sources that don't support/need prefetching.
void AudioSfDecoder::startPrefetch_async() {
    SL_LOGV("AudioSfDecoder::startPrefetch_async()");

    if (wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::startPrefetch_async(): sending check cache msg");

        mStateFlags |= kFlagPreparing | kFlagBuffering;

        (new AMessage(kWhatCheckCache, id()))->post();
    }
}


//--------------------------------------------------
// Returns the number of PCM-format metadata keys currently available
// (0 before prepare completes, NB_PCMMETADATA_KEYS afterwards).
uint32_t AudioSfDecoder::getPcmFormatKeyCount() {
    android::Mutex::Autolock autoLock(mPcmFormatLock);
    return mPcmFormatKeyCount;
}


//--------------------------------------------------
// Writes the size (including terminating NUL) of the metadata key name at 'index'
// into *pKeySize; returns false if 'index' is out of range.
bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) {
    uint32_t keyCount = getPcmFormatKeyCount();
    if (index >= keyCount) {
        return false;
    } else {
        // +1 for the terminating NUL, so a buffer of this size holds the full C string
        *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) +1;
        return true;
    }
}


//--------------------------------------------------
// Copies the metadata key name at 'index' into 'keyName' (capacity 'keySize' bytes).
// Returns false if the index is out of range or the buffer is too small.
bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) {
    uint32_t actualKeySize;
    if (!getPcmFormatKeySize(index, &actualKeySize)) {
        return false;
    }
    if (keySize < actualKeySize) {
        return false;
    }
    // actualKeySize includes the NUL, so the copy is always terminated
    strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize);
    return true;
}
136//-------------------------------------------------- 137bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) { 138 uint32_t keyCount = getPcmFormatKeyCount(); 139 if (index >= keyCount) { 140 *pValueSize = 0; 141 return false; 142 } else { 143 *pValueSize = sizeof(uint32_t); 144 return true; 145 } 146} 147 148 149//-------------------------------------------------- 150bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) { 151 uint32_t valueSize = 0; 152 if (!getPcmFormatValueSize(index, &valueSize)) { 153 return false; 154 } else if (size != valueSize) { 155 // this ensures we are accessing mPcmFormatValues with a valid size for that index 156 SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d", 157 index, size, valueSize); 158 return false; 159 } else { 160 *pValue = mPcmFormatValues[index]; 161 return true; 162 } 163} 164 165 166//-------------------------------------------------- 167// Event handlers 168// it is strictly verboten to call those methods outside of the event loop 169 170// Initializes the data and audio sources, and update the PCM format info 171// post-condition: upon successful initialization based on the player data locator 172// GenericPlayer::onPrepare() was called 173// mDataSource != 0 174// mAudioSource != 0 175// mAudioSourceStarted == true 176void AudioSfDecoder::onPrepare() { 177 SL_LOGD("AudioSfDecoder::onPrepare()"); 178 Mutex::Autolock _l(mBufferSourceLock); 179 180 // Initialize the PCM format info with the known parameters before the start of the decode 181 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16; 182 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16; 183 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN; 184 // initialization with the default values: they will be replaced by the actual values 185 // once the decoder has figured them 
out 186 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount; 187 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz; 188 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask; 189 190 //--------------------------------- 191 // Instantiate and initialize the data source for the decoder 192 sp<DataSource> dataSource; 193 194 switch (mDataLocatorType) { 195 196 case kDataLocatorNone: 197 SL_LOGE("AudioSfDecoder::onPrepare: no data locator set"); 198 notifyPrepared(MEDIA_ERROR_BASE); 199 return; 200 201 case kDataLocatorUri: 202 dataSource = DataSource::CreateFromURI(mDataLocator.uriRef); 203 if (dataSource == NULL) { 204 SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef); 205 notifyPrepared(MEDIA_ERROR_BASE); 206 return; 207 } 208 break; 209 210 case kDataLocatorFd: 211 { 212 dataSource = new FileSource( 213 mDataLocator.fdi.fd, mDataLocator.fdi.offset, mDataLocator.fdi.length); 214 status_t err = dataSource->initCheck(); 215 if (err != OK) { 216 notifyPrepared(err); 217 return; 218 } 219 break; 220 } 221 222 default: 223 TRESPASS(); 224 } 225 226 //--------------------------------- 227 // Instanciate and initialize the decoder attached to the data source 228 sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource); 229 if (extractor == NULL) { 230 SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor."); 231 notifyPrepared(ERROR_UNSUPPORTED); 232 return; 233 } 234 235 ssize_t audioTrackIndex = -1; 236 bool isRawAudio = false; 237 for (size_t i = 0; i < extractor->countTracks(); ++i) { 238 sp<MetaData> meta = extractor->getTrackMetaData(i); 239 240 const char *mime; 241 CHECK(meta->findCString(kKeyMIMEType, &mime)); 242 243 if (!strncasecmp("audio/", mime, 6)) { 244 audioTrackIndex = i; 245 246 if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) { 247 isRawAudio = true; 248 } 249 break; 250 } 251 } 252 253 if (audioTrackIndex < 0) { 254 
SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track."); 255 notifyPrepared(ERROR_UNSUPPORTED); 256 return; 257 } 258 259 sp<MediaSource> source = extractor->getTrack(audioTrackIndex); 260 sp<MetaData> meta = source->getFormat(); 261 262 // we can't trust the OMXCodec (if there is one) to issue a INFO_FORMAT_CHANGED so we want 263 // to have some meaningful values as soon as possible. 264 bool hasChannelCount = meta->findInt32(kKeyChannelCount, &mChannelCount); 265 int32_t sr; 266 bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr); 267 if (hasSampleRate) { 268 mSampleRateHz = (uint32_t) sr; 269 } 270 271 off64_t size; 272 int64_t durationUs; 273 if (dataSource->getSize(&size) == OK 274 && meta->findInt64(kKeyDuration, &durationUs)) { 275 mBitrate = size * 8000000ll / durationUs; // in bits/sec 276 mDurationUsec = durationUs; 277 mDurationMsec = durationUs / 1000; 278 } else { 279 mBitrate = -1; 280 mDurationUsec = -1; 281 } 282 283 // the audio content is not raw PCM, so we need a decoder 284 if (!isRawAudio) { 285 OMXClient client; 286 CHECK_EQ(client.connect(), (status_t)OK); 287 288 source = OMXCodec::Create( 289 client.interface(), meta, false /* createEncoder */, 290 source); 291 292 if (source == NULL) { 293 SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder."); 294 notifyPrepared(ERROR_UNSUPPORTED); 295 return; 296 } 297 298 meta = source->getFormat(); 299 } 300 301 302 if (source->start() != OK) { 303 SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder."); 304 notifyPrepared(MEDIA_ERROR_BASE); 305 return; 306 } 307 308 //--------------------------------- 309 // The data source, and audio source (a decoder if required) are ready to be used 310 mDataSource = dataSource; 311 mAudioSource = source; 312 mAudioSourceStarted = true; 313 314 if (!hasChannelCount) { 315 CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount)); 316 } 317 318 if (!hasSampleRate) { 319 CHECK(meta->findInt32(kKeySampleRate, 
&sr)); 320 mSampleRateHz = (uint32_t) sr; 321 } 322 // FIXME add code below once channel mask support is in, currently initialized to default 323 // if (meta->findInt32(kKeyChannelMask, &mChannelMask)) { 324 // mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask; 325 // } 326 327 if (!wantPrefetch()) { 328 SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch"); 329 // doesn't need prefetching, notify good to go 330 mCacheStatus = kStatusHigh; 331 mCacheFill = 1000; 332 notifyStatus(); 333 notifyCacheFill(); 334 } 335 336 { 337 android::Mutex::Autolock autoLock(mPcmFormatLock); 338 mPcmFormatKeyCount = NB_PCMMETADATA_KEYS; 339 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz; 340 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount; 341 } 342 343 // at this point we have enough information about the source to create the sink that 344 // will consume the data 345 createAudioSink(); 346 347 GenericPlayer::onPrepare(); 348 SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags); 349} 350 351 352void AudioSfDecoder::onPause() { 353 SL_LOGV("AudioSfDecoder::onPause()"); 354 GenericPlayer::onPause(); 355 pauseAudioSink(); 356} 357 358 359void AudioSfDecoder::onPlay() { 360 SL_LOGV("AudioSfDecoder::onPlay()"); 361 GenericPlayer::onPlay(); 362 startAudioSink(); 363} 364 365 366void AudioSfDecoder::onSeek(const sp<AMessage> &msg) { 367 SL_LOGV("AudioSfDecoder::onSeek"); 368 int64_t timeMsec; 369 CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec)); 370 371 Mutex::Autolock _l(mSeekLock); 372 mStateFlags |= kFlagSeeking; 373 mSeekTimeMsec = timeMsec; 374 mTimeDelta = -1; 375 mLastDecodedPositionUs = -1; 376} 377 378 379void AudioSfDecoder::onLoop(const sp<AMessage> &msg) { 380 SL_LOGV("AudioSfDecoder::onLoop"); 381 int32_t loop; 382 CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop)); 383 384 if (loop) { 385 //SL_LOGV("AudioSfDecoder::onLoop start looping"); 386 mStateFlags |= 
kFlagLooping; 387 } else { 388 //SL_LOGV("AudioSfDecoder::onLoop stop looping"); 389 mStateFlags &= ~kFlagLooping; 390 } 391} 392 393 394void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) { 395 //SL_LOGV("AudioSfDecoder::onCheckCache"); 396 bool eos; 397 CacheStatus_t status = getCacheRemaining(&eos); 398 399 if (eos || status == kStatusHigh 400 || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) { 401 if (mStateFlags & kFlagPlaying) { 402 startAudioSink(); 403 } 404 mStateFlags &= ~kFlagBuffering; 405 406 SL_LOGV("AudioSfDecoder::onCheckCache: buffering done."); 407 408 if (mStateFlags & kFlagPreparing) { 409 //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done."); 410 mStateFlags &= ~kFlagPreparing; 411 } 412 413 mTimeDelta = -1; 414 if (mStateFlags & kFlagPlaying) { 415 (new AMessage(kWhatDecode, id()))->post(); 416 } 417 return; 418 } 419 420 msg->post(100000); 421} 422 423 424void AudioSfDecoder::onDecode() { 425 SL_LOGV("AudioSfDecoder::onDecode"); 426 427 //-------------------------------- Need to buffer some more before decoding? 
428 bool eos; 429 if (mDataSource == 0) { 430 // application set play state to paused which failed, then set play state to playing 431 return; 432 } 433 434 if (wantPrefetch() 435 && (getCacheRemaining(&eos) == kStatusLow) 436 && !eos) { 437 SL_LOGV("buffering more."); 438 439 if (mStateFlags & kFlagPlaying) { 440 pauseAudioSink(); 441 } 442 mStateFlags |= kFlagBuffering; 443 (new AMessage(kWhatCheckCache, id()))->post(100000); 444 return; 445 } 446 447 if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) { 448 // don't decode if we're not buffering, prefetching or playing 449 //SL_LOGV("don't decode: not buffering, prefetching or playing"); 450 return; 451 } 452 453 //-------------------------------- Decode 454 status_t err; 455 MediaSource::ReadOptions readOptions; 456 if (mStateFlags & kFlagSeeking) { 457 readOptions.setSeekTo(mSeekTimeMsec * 1000); 458 } 459 460 { 461 Mutex::Autolock _l(mBufferSourceLock); 462 463 if (NULL != mDecodeBuffer) { 464 // the current decoded buffer hasn't been rendered, drop it 465 mDecodeBuffer->release(); 466 mDecodeBuffer = NULL; 467 } 468 if(!mAudioSourceStarted) { 469 return; 470 } 471 err = mAudioSource->read(&mDecodeBuffer, &readOptions); 472 if (err == OK) { 473 CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &mLastDecodedPositionUs)); 474 } 475 } 476 477 { 478 Mutex::Autolock _l(mSeekLock); 479 if (mStateFlags & kFlagSeeking) { 480 mStateFlags &= ~kFlagSeeking; 481 } 482 } 483 484 //-------------------------------- Handle return of decode 485 if (err != OK) { 486 bool continueDecoding = false; 487 switch(err) { 488 case ERROR_END_OF_STREAM: 489 if (0 < mDurationUsec) { 490 mLastDecodedPositionUs = mDurationUsec; 491 } 492 // handle notification and looping at end of stream 493 if (mStateFlags & kFlagPlaying) { 494 notify(PLAYEREVENT_ENDOFSTREAM, 1, true); 495 } 496 if (mStateFlags & kFlagLooping) { 497 seek(0); 498 // kick-off decoding again 499 continueDecoding = true; 500 } 501 break; 502 case 
INFO_FORMAT_CHANGED: 503 SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED"); 504 // reconfigure output 505 { 506 Mutex::Autolock _l(mBufferSourceLock); 507 hasNewDecodeParams(); 508 } 509 continueDecoding = true; 510 break; 511 case INFO_DISCONTINUITY: 512 SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY"); 513 continueDecoding = true; 514 break; 515 default: 516 SL_LOGE("MediaSource::read returned error %d", err); 517 break; 518 } 519 if (continueDecoding) { 520 if (NULL == mDecodeBuffer) { 521 (new AMessage(kWhatDecode, id()))->post(); 522 return; 523 } 524 } else { 525 return; 526 } 527 } 528 529 //-------------------------------- Render 530 sp<AMessage> msg = new AMessage(kWhatRender, id()); 531 msg->post(); 532} 533 534 535void AudioSfDecoder::onRender() { 536 //SL_LOGV("AudioSfDecoder::onRender"); 537 538 Mutex::Autolock _l(mBufferSourceLock); 539 540 if (NULL == mDecodeBuffer) { 541 // nothing to render, move along 542 SL_LOGV("AudioSfDecoder::onRender NULL buffer, exiting"); 543 return; 544 } 545 546 mDecodeBuffer->release(); 547 mDecodeBuffer = NULL; 548 549} 550 551 552void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) { 553 switch (msg->what()) { 554 case kWhatPrepare: 555 onPrepare(); 556 break; 557 558 case kWhatDecode: 559 onDecode(); 560 break; 561 562 case kWhatRender: 563 onRender(); 564 break; 565 566 case kWhatCheckCache: 567 onCheckCache(msg); 568 break; 569 570 case kWhatNotif: 571 onNotify(msg); 572 break; 573 574 case kWhatPlay: 575 onPlay(); 576 break; 577 578 case kWhatPause: 579 onPause(); 580 break; 581 582/* 583 case kWhatSeek: 584 onSeek(msg); 585 break; 586 587 case kWhatLoop: 588 onLoop(msg); 589 break; 590*/ 591 default: 592 GenericPlayer::onMessageReceived(msg); 593 break; 594 } 595} 596 597//-------------------------------------------------- 598// Prepared state, prefetch status notifications 599void AudioSfDecoder::notifyPrepared(status_t prepareRes) { 600 notify(PLAYEREVENT_PREPARED, 
(int32_t)prepareRes, true); 601 602} 603 604 605void AudioSfDecoder::onNotify(const sp<AMessage> &msg) { 606 notif_cbf_t notifyClient; 607 void* notifyUser; 608 { 609 android::Mutex::Autolock autoLock(mNotifyClientLock); 610 if (NULL == mNotifyClient) { 611 return; 612 } else { 613 notifyClient = mNotifyClient; 614 notifyUser = mNotifyUser; 615 } 616 } 617 int32_t val; 618 if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) { 619 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val); 620 notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser); 621 } 622 else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) { 623 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val); 624 notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser); 625 } 626 else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) { 627 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val); 628 notifyClient(kEventEndOfStream, val, 0, notifyUser); 629 } 630 else { 631 GenericPlayer::onNotify(msg); 632 } 633} 634 635 636//-------------------------------------------------- 637// Private utility functions 638 639bool AudioSfDecoder::wantPrefetch() { 640 if (mDataSource != 0) { 641 return (mDataSource->flags() & DataSource::kWantsPrefetching); 642 } else { 643 // happens if an improper data locator was passed, if the media extractor couldn't be 644 // initialized, if there is no audio track in the media, if the OMX decoder couldn't be 645 // instantiated, if the source couldn't be opened, or if the MediaSource 646 // couldn't be started 647 SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource"); 648 return false; 649 } 650} 651 652 653int64_t AudioSfDecoder::getPositionUsec() { 654 Mutex::Autolock _l(mSeekLock); 655 if (mStateFlags & kFlagSeeking) { 656 return mSeekTimeMsec * 1000; 657 } else { 658 if (mLastDecodedPositionUs < 0) { 659 return 0; 660 } else { 661 return 
mLastDecodedPositionUs; 662 } 663 } 664} 665 666 667CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) { 668 sp<NuCachedSource2> cachedSource = 669 static_cast<NuCachedSource2 *>(mDataSource.get()); 670 671 CacheStatus_t oldStatus = mCacheStatus; 672 673 status_t finalStatus; 674 size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus); 675 *eos = (finalStatus != OK); 676 677 CHECK_GE(mBitrate, 0); 678 679 int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate; 680 //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)", 681 // dataRemainingUs / 1E6, *eos); 682 683 if (*eos) { 684 // data is buffered up to the end of the stream, it can't get any better than this 685 mCacheStatus = kStatusHigh; 686 mCacheFill = 1000; 687 688 } else { 689 if (mDurationUsec > 0) { 690 // known duration: 691 692 // fill level is ratio of how much has been played + how much is 693 // cached, divided by total duration 694 uint32_t currentPositionUsec = getPositionUsec(); 695 mCacheFill = (int16_t) ((1000.0 696 * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec)); 697 //SL_LOGV("cacheFill = %d", mCacheFill); 698 699 // cache status is evaluated against duration thresholds 700 if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) { 701 mCacheStatus = kStatusHigh; 702 //LOGV("high"); 703 } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) { 704 //LOGV("enough"); 705 mCacheStatus = kStatusEnough; 706 } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) { 707 //LOGV("low"); 708 mCacheStatus = kStatusLow; 709 } else { 710 mCacheStatus = kStatusIntermediate; 711 } 712 713 } else { 714 // unknown duration: 715 716 // cache status is evaluated against cache amount thresholds 717 // (no duration so we don't have the bitrate either, could be derived from format?) 
718 if (dataRemaining > SIZE_CACHED_HIGH_BYTES) { 719 mCacheStatus = kStatusHigh; 720 } else if (dataRemaining > SIZE_CACHED_MED_BYTES) { 721 mCacheStatus = kStatusEnough; 722 } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) { 723 mCacheStatus = kStatusLow; 724 } else { 725 mCacheStatus = kStatusIntermediate; 726 } 727 } 728 729 } 730 731 if (oldStatus != mCacheStatus) { 732 notifyStatus(); 733 } 734 735 if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) { 736 notifyCacheFill(); 737 } 738 739 return mCacheStatus; 740} 741 742 743void AudioSfDecoder::hasNewDecodeParams() { 744 745 if ((mAudioSource != 0) && mAudioSourceStarted) { 746 sp<MetaData> meta = mAudioSource->getFormat(); 747 748 CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount)); 749 750 SL_LOGV("old sample rate = %d", mSampleRateHz); 751 int32_t sr; 752 CHECK(meta->findInt32(kKeySampleRate, &sr)); 753 mSampleRateHz = (uint32_t) sr; 754 SL_LOGV("found new sample rate = %d", mSampleRateHz); 755 756 { 757 android::Mutex::Autolock autoLock(mPcmFormatLock); 758 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount; 759 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz; 760 } 761 } 762 763 // alert users of those params 764 updateAudioSink(); 765} 766 767} // namespace android 768