// android_AudioSfDecoder.cpp revision 209c05d9104db8b77ef0846ee8eb3b161bf44031
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Verbose

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"

#include <media/stagefright/foundation/ADebug.h>


// Cache-fill thresholds (in bytes) used by getCacheRemaining() when the
// stream duration is unknown and no bitrate-based estimate is possible.
#define SIZE_CACHED_HIGH_BYTES 1000000
#define SIZE_CACHED_MED_BYTES   700000
#define SIZE_CACHED_LOW_BYTES   400000

namespace android {

//--------------------------------------------------------------------------------------------------
// Constructor: initializes decoding state to "nothing opened yet".
// mDataSource / mAudioSource are set later by onPrepare(); mBitrate and
// mDurationUsec stay at their "unknown" sentinels until the extractor reports them.
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mDataSource(0),
        mAudioSource(0),
        mAudioSourceStarted(false),
        mBitrate(-1),
        mChannelMask(UNKNOWN_CHANNELMASK),
        mDurationUsec(ANDROID_UNKNOWN_TIME),
        mDecodeBuffer(NULL),
        mSeekTimeMsec(0),
        // play event logic depends on the initial time being zero not ANDROID_UNKNOWN_TIME
        mLastDecodedPositionUs(0)
{
    SL_LOGD("AudioSfDecoder::AudioSfDecoder()");
}


AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGD("AudioSfDecoder::~AudioSfDecoder()");
}


// Called before destruction: releases the in-flight decode buffer and stops the
// audio source so the MediaSource is quiescent by the time the destructor runs.
// mBufferSourceLock guards mDecodeBuffer / mAudioSourceStarted against the
// event-loop thread running onDecode() concurrently.
void AudioSfDecoder::preDestroy() {
    GenericPlayer::preDestroy();
    SL_LOGD("AudioSfDecoder::preDestroy()");
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }

        if ((mAudioSource != 0) && mAudioSourceStarted) {
            mAudioSource->stop();
            mAudioSourceStarted = false;
        }
    }
}


//--------------------------------------------------
// Starts playback and kicks off the decode loop by posting the first
// kWhatDecode message to the player's event loop.
void AudioSfDecoder::play() {
    SL_LOGD("AudioSfDecoder::play");

    GenericPlayer::play();
    (new AMessage(kWhatDecode, id()))->post();
}


// Reports the current playback position in milliseconds, or
// ANDROID_UNKNOWN_TIME if the position is unknown.
void AudioSfDecoder::getPositionMsec(int* msec) {
    int64_t timeUsec = getPositionUsec();
    if (timeUsec == ANDROID_UNKNOWN_TIME) {
        *msec = ANDROID_UNKNOWN_TIME;
    } else {
        *msec = timeUsec / 1000;
    }
}


//--------------------------------------------------
// Number of PCM-format metadata keys exposed to the application.
uint32_t AudioSfDecoder::getPcmFormatKeyCount() const {
    return NB_PCMMETADATA_KEYS;
}


//--------------------------------------------------
// Returns in *pKeySize the storage needed for key string at 'index'
// (including the NUL terminator); false if index is out of range.
bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        return false;
    } else {
        *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) +1;
        return true;
    }
}


//--------------------------------------------------
// Copies the metadata key name at 'index' into keyName; fails if the index is
// invalid or the caller-supplied buffer (keySize) is too small.
bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) {
    uint32_t actualKeySize;
    if (!getPcmFormatKeySize(index, &actualKeySize)) {
        return false;
    }
    if (keySize < actualKeySize) {
        return false;
    }
    strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize);
    return true;
}


//--------------------------------------------------
// All PCM metadata values are uint32_t; reports that size, or 0 on bad index.
bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        *pValueSize = 0;
        return false;
    } else {
        *pValueSize = sizeof(uint32_t);
        return true;
    }
}


//--------------------------------------------------
// Reads the metadata value at 'index' under mPcmFormatLock; the exact-size
// check guards against the caller indexing mPcmFormatValues with a stale size.
bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) {
    uint32_t valueSize = 0;
    if (!getPcmFormatValueSize(index, &valueSize)) {
        return false;
    } else if (size != valueSize) {
        // this ensures we are accessing mPcmFormatValues with a valid size for that index
        SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d",
                index, size, valueSize);
        return false;
    } else {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        *pValue = mPcmFormatValues[index];
        return true;
    }
}


//--------------------------------------------------
// Event handlers
//  it is strictly verboten to call those methods outside of the event loop

// Initializes the data and audio sources, and update the PCM format info
// post-condition: upon successful initialization based on the player data locator
//    GenericPlayer::onPrepare() was called
//    mDataSource != 0
//    mAudioSource != 0
//    mAudioSourceStarted == true
// All error returns from this method are via notifyPrepared(status) followed by "return".
void AudioSfDecoder::onPrepare() {
    SL_LOGD("AudioSfDecoder::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        // Initialize the PCM format info with the known parameters before the start of the decode
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
        //    initialization with the default values: they will be replaced by the actual values
        //      once the decoder has figured them out
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = mSampleRateHz;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
    }

    //---------------------------------
    // Instantiate and initialize the data source for the decoder
    sp<DataSource> dataSource;

    switch (mDataLocatorType) {

    case kDataLocatorNone:
        SL_LOGE("AudioSfDecoder::onPrepare: no data locator set");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;

    case kDataLocatorUri:
        dataSource = DataSource::CreateFromURI(mDataLocator.uriRef);
        if (dataSource == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef);
            notifyPrepared(MEDIA_ERROR_BASE);
            return;
        }
        break;

    case kDataLocatorFd:
        {
        // As FileSource unconditionally takes ownership of the fd and closes it, then
        // we have to make a dup for FileSource if the app wants to keep ownership itself
        int fd = mDataLocator.fdi.fd;
        if (mDataLocator.fdi.mCloseAfterUse) {
            mDataLocator.fdi.mCloseAfterUse = false;
        } else {
            fd = ::dup(fd);
        }
        dataSource = new FileSource(fd, mDataLocator.fdi.offset, mDataLocator.fdi.length);
        status_t err = dataSource->initCheck();
        if (err != OK) {
            notifyPrepared(err);
            return;
        }
        break;
        }

    default:
        // invalid locator type is a programming error in the caller
        TRESPASS();
    }

    //---------------------------------
    // Instanciate and initialize the decoder attached to the data source
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    if (extractor == NULL) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    // Scan the container for the first audio track; remember whether it is
    // already raw PCM (in which case no decoder is needed).
    ssize_t audioTrackIndex = -1;
    bool isRawAudio = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("audio/", mime, 6)) {
            audioTrackIndex = i;

            if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
                isRawAudio = true;
            }
            break;
        }
    }

    if (audioTrackIndex < 0) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MediaSource> source = extractor->getTrack(audioTrackIndex);
    sp<MetaData> meta = source->getFormat();

    // we can't trust the OMXCodec (if there is one) to issue a INFO_FORMAT_CHANGED so we want
    // to have some meaningful values as soon as possible.
    bool hasChannelCount = meta->findInt32(kKeyChannelCount, &mChannelCount);
    int32_t sr;
    bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr);
    if (hasSampleRate) {
        mSampleRateHz = (uint32_t) sr;
    }

    // Derive an average bitrate from container size and duration; used later by
    // getCacheRemaining() to convert cached bytes into cached time.
    off64_t size;
    int64_t durationUs;
    if (dataSource->getSize(&size) == OK
            && meta->findInt64(kKeyDuration, &durationUs)) {
        if (durationUs != 0) {
            mBitrate = size * 8000000ll / durationUs;  // in bits/sec
        } else {
            mBitrate = -1;
        }
        mDurationUsec = durationUs;
        mDurationMsec = durationUs / 1000;
    } else {
        mBitrate = -1;
        mDurationUsec = ANDROID_UNKNOWN_TIME;
        mDurationMsec = ANDROID_UNKNOWN_TIME;
    }

    // the audio content is not raw PCM, so we need a decoder
    if (!isRawAudio) {
        OMXClient client;
        CHECK_EQ(client.connect(), (status_t)OK);

        source = OMXCodec::Create(
                client.interface(), meta, false /* createEncoder */,
                source);

        if (source == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder.");
            notifyPrepared(ERROR_UNSUPPORTED);
            return;
        }

        meta = source->getFormat();
    }


    if (source->start() != OK) {
        SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    // The data source, and audio source (a decoder if required) are ready to be used
    mDataSource = dataSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    // Fill in any format fields the extractor didn't report, from the
    // (possibly decoder-provided) output format.
    if (!hasChannelCount) {
        CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
    }

    if (!hasSampleRate) {
        CHECK(meta->findInt32(kKeySampleRate, &sr));
        mSampleRateHz = (uint32_t) sr;
    }
    // FIXME add code below once channel mask support is in, currently initialized to default
    //       value computed from the channel count
    //    if (!hasChannelMask) {
    //        CHECK(meta->findInt32(kKeyChannelMask, &mChannelMask));
    //    }
    mChannelMask = channelCountToMask(mChannelCount);

    if (!wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch");
        // doesn't need prefetching, notify good to go
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;
        notifyStatus();
        notifyCacheFill();
    }

    {
        // Publish the now-known format to the application-visible metadata.
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = mSampleRateHz;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
    }

    // at this point we have enough information about the source to create the sink that
    // will consume the data
    createAudioSink();

    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    GenericPlayer::onPrepare();
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


// Event-loop handler: pauses the audio sink in addition to base-class handling.
void AudioSfDecoder::onPause() {
    SL_LOGV("AudioSfDecoder::onPause()");
    GenericPlayer::onPause();
    pauseAudioSink();
}


// Event-loop handler: starts the audio sink in addition to base-class handling.
void AudioSfDecoder::onPlay() {
    SL_LOGV("AudioSfDecoder::onPlay()");
    GenericPlayer::onPlay();
    startAudioSink();
}


// Event-loop handler: records the seek target; the actual seek is performed by
// the next onDecode() via ReadOptions::setSeekTo.
void AudioSfDecoder::onSeek(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onSeek");
    int64_t timeMsec;
    CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec));

    Mutex::Autolock _l(mTimeLock);
    mStateFlags |= kFlagSeeking;
    mSeekTimeMsec = timeMsec;
    // don't set mLastDecodedPositionUs to ANDROID_UNKNOWN_TIME; getPositionUsec
    // ignores mLastDecodedPositionUs while seeking, and substitutes the seek goal instead

    // nop for now
    GenericPlayer::onSeek(msg);
}


// Event-loop handler: toggles the looping flag read at end-of-stream.
void AudioSfDecoder::onLoop(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onLoop");
    int32_t loop;
    CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop));

    if (loop) {
        //SL_LOGV("AudioSfDecoder::onLoop start looping");
        mStateFlags |= kFlagLooping;
    } else {
        //SL_LOGV("AudioSfDecoder::onLoop stop looping");
        mStateFlags &= ~kFlagLooping;
    }

    // nop for now
    GenericPlayer::onLoop(msg);
}


// Event-loop handler: polls the cache level while buffering. When the cache is
// full enough (or at EOS), leaves the buffering state and resumes decoding;
// otherwise re-posts itself 100ms later.
void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) {
    //SL_LOGV("AudioSfDecoder::onCheckCache");
    bool eos;
    CacheStatus_t status = getCacheRemaining(&eos);

    // while prepping, a kStatusEnough level is already sufficient to proceed
    if (eos || status == kStatusHigh
            || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) {
        if (mStateFlags & kFlagPlaying) {
            startAudioSink();
        }
        mStateFlags &= ~kFlagBuffering;

        SL_LOGV("AudioSfDecoder::onCheckCache: buffering done.");

        if (mStateFlags & kFlagPreparing) {
            //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done.");
            mStateFlags &= ~kFlagPreparing;
        }

        if (mStateFlags & kFlagPlaying) {
            (new AMessage(kWhatDecode, id()))->post();
        }
        return;
    }

    // not enough data yet: check again in 100ms
    msg->post(100000);
}


// Event-loop handler: reads (decodes) one buffer from the audio source,
// updates the playback position, handles decoder status codes (EOS, format
// change, discontinuity), and posts a kWhatRender message on success.
void AudioSfDecoder::onDecode() {
    SL_LOGV("AudioSfDecoder::onDecode");

    //-------------------------------- Need to buffer some more before decoding?
    bool eos;
    if (mDataSource == 0) {
        // application set play state to paused which failed, then set play state to playing
        return;
    }

    if (wantPrefetch()
            && (getCacheRemaining(&eos) == kStatusLow)
            && !eos) {
        SL_LOGV("buffering more.");

        // cache is running low: pause output and enter the buffering state,
        // onCheckCache will restart decoding once the cache has refilled
        if (mStateFlags & kFlagPlaying) {
            pauseAudioSink();
        }
        mStateFlags |= kFlagBuffering;
        (new AMessage(kWhatCheckCache, id()))->post(100000);
        return;
    }

    if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) {
        // don't decode if we're not buffering, prefetching or playing
        //SL_LOGV("don't decode: not buffering, prefetching or playing");
        return;
    }

    //-------------------------------- Decode
    status_t err;
    MediaSource::ReadOptions readOptions;
    if (mStateFlags & kFlagSeeking) {
        assert(mSeekTimeMsec != ANDROID_UNKNOWN_TIME);
        readOptions.setSeekTo(mSeekTimeMsec * 1000);
    }

    int64_t timeUsec = ANDROID_UNKNOWN_TIME;
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            // the current decoded buffer hasn't been rendered, drop it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }
        if(!mAudioSourceStarted) {
            // preDestroy() stopped the source under this same lock
            return;
        }
        err = mAudioSource->read(&mDecodeBuffer, &readOptions);
        if (err == OK) {
            // FIXME workaround apparent bug in AAC decoder: kKeyTime is 3 frames old if length is 0
            if (mDecodeBuffer->range_length() == 0) {
                timeUsec = ANDROID_UNKNOWN_TIME;
            } else {
                CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &timeUsec));
            }
        }
    }

    {
        Mutex::Autolock _l(mTimeLock);
        if (mStateFlags & kFlagSeeking) {
            mStateFlags &= ~kFlagSeeking;
            mSeekTimeMsec = ANDROID_UNKNOWN_TIME;
        }
        if (timeUsec != ANDROID_UNKNOWN_TIME) {
            // Note that though we've decoded this position, we haven't rendered it yet.
            // So a GetPosition called after this point will observe the advanced position,
            // even though the PCM may not have been supplied to the sink.  That's OK as
            // we don't claim to provide frame-accurate (let alone sample-accurate) GetPosition.
            mLastDecodedPositionUs = timeUsec;
        }
    }

    //-------------------------------- Handle return of decode
    if (err != OK) {
        bool continueDecoding = false;
        switch(err) {
            case ERROR_END_OF_STREAM:
                // pin the position to the duration so GetPosition reports the end
                if (0 < mDurationUsec) {
                    Mutex::Autolock _l(mTimeLock);
                    mLastDecodedPositionUs = mDurationUsec;
                }
                // handle notification and looping at end of stream
                if (mStateFlags & kFlagPlaying) {
                    notify(PLAYEREVENT_ENDOFSTREAM, 1, true);
                }
                if (mStateFlags & kFlagLooping) {
                    seek(0);
                    // kick-off decoding again
                    continueDecoding = true;
                }
                break;
            case INFO_FORMAT_CHANGED:
                SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED");
                // reconfigure output
                {
                    Mutex::Autolock _l(mBufferSourceLock);
                    hasNewDecodeParams();
                }
                continueDecoding = true;
                break;
            case INFO_DISCONTINUITY:
                SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY");
                continueDecoding = true;
                break;
            default:
                SL_LOGE("MediaSource::read returned error %d", err);
                break;
        }
        if (continueDecoding) {
            if (NULL == mDecodeBuffer) {
                // no data came with the status code: schedule another decode pass
                (new AMessage(kWhatDecode, id()))->post();
                return;
            }
        } else {
            return;
        }
    }

    //-------------------------------- Render
    sp<AMessage> msg = new AMessage(kWhatRender, id());
    msg->post();

}


// Dispatches decoder-specific messages; everything else goes to the base class.
void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDecode:
            onDecode();
            break;

        case kWhatRender:
            onRender();
            break;

        case kWhatCheckCache:
            onCheckCache(msg);
            break;

        default:
            GenericPlayer::onMessageReceived(msg);
            break;
    }
}
584//-------------------------------------------------- 585// Prepared state, prefetch status notifications 586void AudioSfDecoder::notifyPrepared(status_t prepareRes) { 587 assert(!(mStateFlags & (kFlagPrepared | kFlagPreparedUnsuccessfully))); 588 if (NO_ERROR == prepareRes) { 589 // The "then" fork is not currently used, but is kept here to make it easier 590 // to replace by a new signalPrepareCompletion(status) if we re-visit this later. 591 mStateFlags |= kFlagPrepared; 592 } else { 593 mStateFlags |= kFlagPreparedUnsuccessfully; 594 } 595 // Do not call the superclass onPrepare to notify, because it uses a default error 596 // status code but we can provide a more specific one. 597 // GenericPlayer::onPrepare(); 598 notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true); 599 SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags); 600} 601 602 603void AudioSfDecoder::onNotify(const sp<AMessage> &msg) { 604 notif_cbf_t notifyClient; 605 void* notifyUser; 606 { 607 android::Mutex::Autolock autoLock(mNotifyClientLock); 608 if (NULL == mNotifyClient) { 609 return; 610 } else { 611 notifyClient = mNotifyClient; 612 notifyUser = mNotifyUser; 613 } 614 } 615 int32_t val; 616 if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) { 617 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val); 618 notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser); 619 } 620 else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) { 621 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val); 622 notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser); 623 } 624 else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) { 625 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val); 626 notifyClient(kEventEndOfStream, val, 0, notifyUser); 627 } 628 else { 629 GenericPlayer::onNotify(msg); 630 } 631} 632 633 634//-------------------------------------------------- 635// 
Private utility functions 636 637bool AudioSfDecoder::wantPrefetch() { 638 if (mDataSource != 0) { 639 return (mDataSource->flags() & DataSource::kWantsPrefetching); 640 } else { 641 // happens if an improper data locator was passed, if the media extractor couldn't be 642 // initialized, if there is no audio track in the media, if the OMX decoder couldn't be 643 // instantiated, if the source couldn't be opened, or if the MediaSource 644 // couldn't be started 645 SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource"); 646 return false; 647 } 648} 649 650 651int64_t AudioSfDecoder::getPositionUsec() { 652 Mutex::Autolock _l(mTimeLock); 653 if (mStateFlags & kFlagSeeking) { 654 return mSeekTimeMsec * 1000; 655 } else { 656 return mLastDecodedPositionUs; 657 } 658} 659 660 661CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) { 662 sp<NuCachedSource2> cachedSource = 663 static_cast<NuCachedSource2 *>(mDataSource.get()); 664 665 CacheStatus_t oldStatus = mCacheStatus; 666 667 status_t finalStatus; 668 size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus); 669 *eos = (finalStatus != OK); 670 671 CHECK_GE(mBitrate, 0); 672 673 int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate; 674 //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)", 675 // dataRemainingUs / 1E6, *eos); 676 677 if (*eos) { 678 // data is buffered up to the end of the stream, it can't get any better than this 679 mCacheStatus = kStatusHigh; 680 mCacheFill = 1000; 681 682 } else { 683 if (mDurationUsec > 0) { 684 // known duration: 685 686 // fill level is ratio of how much has been played + how much is 687 // cached, divided by total duration 688 uint32_t currentPositionUsec = getPositionUsec(); 689 if (currentPositionUsec == ANDROID_UNKNOWN_TIME) { 690 // if we don't know where we are, assume the worst for the fill ratio 691 currentPositionUsec = 0; 692 } 693 if (mDurationUsec > 0) { 694 mCacheFill = (int16_t) 
((1000.0 695 * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec)); 696 } else { 697 mCacheFill = 0; 698 } 699 //SL_LOGV("cacheFill = %d", mCacheFill); 700 701 // cache status is evaluated against duration thresholds 702 if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) { 703 mCacheStatus = kStatusHigh; 704 //LOGV("high"); 705 } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) { 706 //LOGV("enough"); 707 mCacheStatus = kStatusEnough; 708 } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) { 709 //LOGV("low"); 710 mCacheStatus = kStatusLow; 711 } else { 712 mCacheStatus = kStatusIntermediate; 713 } 714 715 } else { 716 // unknown duration: 717 718 // cache status is evaluated against cache amount thresholds 719 // (no duration so we don't have the bitrate either, could be derived from format?) 720 if (dataRemaining > SIZE_CACHED_HIGH_BYTES) { 721 mCacheStatus = kStatusHigh; 722 } else if (dataRemaining > SIZE_CACHED_MED_BYTES) { 723 mCacheStatus = kStatusEnough; 724 } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) { 725 mCacheStatus = kStatusLow; 726 } else { 727 mCacheStatus = kStatusIntermediate; 728 } 729 } 730 731 } 732 733 if (oldStatus != mCacheStatus) { 734 notifyStatus(); 735 } 736 737 if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) { 738 notifyCacheFill(); 739 } 740 741 return mCacheStatus; 742} 743 744 745void AudioSfDecoder::hasNewDecodeParams() { 746 747 if ((mAudioSource != 0) && mAudioSourceStarted) { 748 sp<MetaData> meta = mAudioSource->getFormat(); 749 750 SL_LOGV("old sample rate = %d, channel count = %d", mSampleRateHz, mChannelCount); 751 752 CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount)); 753 int32_t sr; 754 CHECK(meta->findInt32(kKeySampleRate, &sr)); 755 mSampleRateHz = (uint32_t) sr; 756 SL_LOGV("format changed: new sample rate = %d, channel count = %d", 757 mSampleRateHz, mChannelCount); 758 759 // FIXME similar to onPrepare() 760 mChannelMask = 
channelCountToMask(mChannelCount); 761 762 { 763 android::Mutex::Autolock autoLock(mPcmFormatLock); 764 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount; 765 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = mSampleRateHz; 766 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask; 767 } 768 } 769 770 // alert users of those params 771 updateAudioSink(); 772} 773 774} // namespace android 775