// android_AudioSfDecoder.cpp revision 1fa5c3206d06bbebdea2dc92f378ce6b8a211e23
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define USE_LOG SLAndroidLogLevel_Verbose 18 19#include "sles_allinclusive.h" 20#include "android/android_AudioSfDecoder.h" 21 22#include <media/stagefright/foundation/ADebug.h> 23 24 25#define SIZE_CACHED_HIGH_BYTES 1000000 26#define SIZE_CACHED_MED_BYTES 700000 27#define SIZE_CACHED_LOW_BYTES 400000 28 29namespace android { 30 31//-------------------------------------------------------------------------------------------------- 32AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params), 33 mDataSource(0), 34 mAudioSource(0), 35 mAudioSourceStarted(false), 36 mBitrate(-1), 37 mDurationUsec(ANDROID_UNKNOWN_TIME), 38 mDecodeBuffer(NULL), 39 mSeekTimeMsec(0), 40 // play event logic depends on the initial time being zero not ANDROID_UNKNOWN_TIME 41 mLastDecodedPositionUs(0) 42{ 43 SL_LOGD("AudioSfDecoder::AudioSfDecoder()"); 44} 45 46 47AudioSfDecoder::~AudioSfDecoder() { 48 SL_LOGD("AudioSfDecoder::~AudioSfDecoder()"); 49} 50 51 52void AudioSfDecoder::preDestroy() { 53 GenericPlayer::preDestroy(); 54 SL_LOGD("AudioSfDecoder::preDestroy()"); 55 { 56 Mutex::Autolock _l(mBufferSourceLock); 57 58 if (NULL != mDecodeBuffer) { 59 mDecodeBuffer->release(); 60 mDecodeBuffer = NULL; 61 } 62 63 if ((mAudioSource != 0) && mAudioSourceStarted) { 64 mAudioSource->stop(); 65 mAudioSourceStarted = false; 66 } 67 } 
68} 69 70 71//-------------------------------------------------- 72void AudioSfDecoder::play() { 73 SL_LOGD("AudioSfDecoder::play"); 74 75 GenericPlayer::play(); 76 (new AMessage(kWhatDecode, id()))->post(); 77} 78 79 80void AudioSfDecoder::getPositionMsec(int* msec) { 81 int64_t timeUsec = getPositionUsec(); 82 if (timeUsec == ANDROID_UNKNOWN_TIME) { 83 *msec = ANDROID_UNKNOWN_TIME; 84 } else { 85 *msec = timeUsec / 1000; 86 } 87} 88 89 90//-------------------------------------------------- 91uint32_t AudioSfDecoder::getPcmFormatKeyCount() const { 92 return NB_PCMMETADATA_KEYS; 93} 94 95 96//-------------------------------------------------- 97bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) { 98 if (index >= NB_PCMMETADATA_KEYS) { 99 return false; 100 } else { 101 *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) +1; 102 return true; 103 } 104} 105 106 107//-------------------------------------------------- 108bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) { 109 uint32_t actualKeySize; 110 if (!getPcmFormatKeySize(index, &actualKeySize)) { 111 return false; 112 } 113 if (keySize < actualKeySize) { 114 return false; 115 } 116 strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize); 117 return true; 118} 119 120 121//-------------------------------------------------- 122bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) { 123 if (index >= NB_PCMMETADATA_KEYS) { 124 *pValueSize = 0; 125 return false; 126 } else { 127 *pValueSize = sizeof(uint32_t); 128 return true; 129 } 130} 131 132 133//-------------------------------------------------- 134bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) { 135 uint32_t valueSize = 0; 136 if (!getPcmFormatValueSize(index, &valueSize)) { 137 return false; 138 } else if (size != valueSize) { 139 // this ensures we are accessing mPcmFormatValues with a valid size for that index 140 
SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d", 141 index, size, valueSize); 142 return false; 143 } else { 144 android::Mutex::Autolock autoLock(mPcmFormatLock); 145 *pValue = mPcmFormatValues[index]; 146 return true; 147 } 148} 149 150 151//-------------------------------------------------- 152// Event handlers 153// it is strictly verboten to call those methods outside of the event loop 154 155// Initializes the data and audio sources, and update the PCM format info 156// post-condition: upon successful initialization based on the player data locator 157// GenericPlayer::onPrepare() was called 158// mDataSource != 0 159// mAudioSource != 0 160// mAudioSourceStarted == true 161// All error returns from this method are via notifyPrepared(status) followed by "return". 162void AudioSfDecoder::onPrepare() { 163 SL_LOGD("AudioSfDecoder::onPrepare()"); 164 Mutex::Autolock _l(mBufferSourceLock); 165 166 { 167 android::Mutex::Autolock autoLock(mPcmFormatLock); 168 // Initialize the PCM format info with the known parameters before the start of the decode 169 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16; 170 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16; 171 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN; 172 // initialization with the default values: they will be replaced by the actual values 173 // once the decoder has figured them out 174 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = UNKNOWN_NUMCHANNELS; 175 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = UNKNOWN_SAMPLERATE; 176 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = UNKNOWN_CHANNELMASK; 177 } 178 179 //--------------------------------- 180 // Instantiate and initialize the data source for the decoder 181 sp<DataSource> dataSource; 182 183 switch (mDataLocatorType) { 184 185 case kDataLocatorNone: 186 SL_LOGE("AudioSfDecoder::onPrepare: 
no data locator set"); 187 notifyPrepared(MEDIA_ERROR_BASE); 188 return; 189 190 case kDataLocatorUri: 191 dataSource = DataSource::CreateFromURI(mDataLocator.uriRef); 192 if (dataSource == NULL) { 193 SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef); 194 notifyPrepared(MEDIA_ERROR_BASE); 195 return; 196 } 197 break; 198 199 case kDataLocatorFd: 200 { 201 // As FileSource unconditionally takes ownership of the fd and closes it, then 202 // we have to make a dup for FileSource if the app wants to keep ownership itself 203 int fd = mDataLocator.fdi.fd; 204 if (mDataLocator.fdi.mCloseAfterUse) { 205 mDataLocator.fdi.mCloseAfterUse = false; 206 } else { 207 fd = ::dup(fd); 208 } 209 dataSource = new FileSource(fd, mDataLocator.fdi.offset, mDataLocator.fdi.length); 210 status_t err = dataSource->initCheck(); 211 if (err != OK) { 212 notifyPrepared(err); 213 return; 214 } 215 break; 216 } 217 218 default: 219 TRESPASS(); 220 } 221 222 //--------------------------------- 223 // Instanciate and initialize the decoder attached to the data source 224 sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource); 225 if (extractor == NULL) { 226 SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor."); 227 notifyPrepared(ERROR_UNSUPPORTED); 228 return; 229 } 230 231 ssize_t audioTrackIndex = -1; 232 bool isRawAudio = false; 233 for (size_t i = 0; i < extractor->countTracks(); ++i) { 234 sp<MetaData> meta = extractor->getTrackMetaData(i); 235 236 const char *mime; 237 CHECK(meta->findCString(kKeyMIMEType, &mime)); 238 239 if (!strncasecmp("audio/", mime, 6)) { 240 audioTrackIndex = i; 241 242 if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) { 243 isRawAudio = true; 244 } 245 break; 246 } 247 } 248 249 if (audioTrackIndex < 0) { 250 SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track."); 251 notifyPrepared(ERROR_UNSUPPORTED); 252 return; 253 } 254 255 sp<MediaSource> source = 
extractor->getTrack(audioTrackIndex); 256 sp<MetaData> meta = source->getFormat(); 257 258 // we can't trust the OMXCodec (if there is one) to issue a INFO_FORMAT_CHANGED so we want 259 // to have some meaningful values as soon as possible. 260 int32_t channelCount; 261 bool hasChannelCount = meta->findInt32(kKeyChannelCount, &channelCount); 262 int32_t sr; 263 bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr); 264 265 // first compute the duration 266 off64_t size; 267 int64_t durationUs; 268 int32_t durationMsec; 269 if (dataSource->getSize(&size) == OK 270 && meta->findInt64(kKeyDuration, &durationUs)) { 271 if (durationUs != 0) { 272 mBitrate = size * 8000000ll / durationUs; // in bits/sec 273 } else { 274 mBitrate = -1; 275 } 276 mDurationUsec = durationUs; 277 durationMsec = durationUs / 1000; 278 } else { 279 mBitrate = -1; 280 mDurationUsec = ANDROID_UNKNOWN_TIME; 281 durationMsec = ANDROID_UNKNOWN_TIME; 282 } 283 284 // then assign the duration under the settings lock 285 { 286 Mutex::Autolock _l(mSettingsLock); 287 mDurationMsec = durationMsec; 288 } 289 290 // the audio content is not raw PCM, so we need a decoder 291 if (!isRawAudio) { 292 OMXClient client; 293 CHECK_EQ(client.connect(), (status_t)OK); 294 295 source = OMXCodec::Create( 296 client.interface(), meta, false /* createEncoder */, 297 source); 298 299 if (source == NULL) { 300 SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder."); 301 notifyPrepared(ERROR_UNSUPPORTED); 302 return; 303 } 304 305 meta = source->getFormat(); 306 } 307 308 309 if (source->start() != OK) { 310 SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder."); 311 notifyPrepared(MEDIA_ERROR_BASE); 312 return; 313 } 314 315 //--------------------------------- 316 // The data source, and audio source (a decoder if required) are ready to be used 317 mDataSource = dataSource; 318 mAudioSource = source; 319 mAudioSourceStarted = true; 320 321 if (!hasChannelCount) { 322 
CHECK(meta->findInt32(kKeyChannelCount, &channelCount)); 323 } 324 325 if (!hasSampleRate) { 326 CHECK(meta->findInt32(kKeySampleRate, &sr)); 327 } 328 // FIXME add code below once channel mask support is in, currently initialized to default 329 // value computed from the channel count 330 // if (!hasChannelMask) { 331 // CHECK(meta->findInt32(kKeyChannelMask, &channelMask)); 332 // } 333 334 if (!wantPrefetch()) { 335 SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch"); 336 // doesn't need prefetching, notify good to go 337 mCacheStatus = kStatusHigh; 338 mCacheFill = 1000; 339 notifyStatus(); 340 notifyCacheFill(); 341 } 342 343 { 344 android::Mutex::Autolock autoLock(mPcmFormatLock); 345 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr; 346 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount; 347 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = 348 channelCountToMask(channelCount); 349 } 350 351 // at this point we have enough information about the source to create the sink that 352 // will consume the data 353 createAudioSink(); 354 355 // signal successful completion of prepare 356 mStateFlags |= kFlagPrepared; 357 358 GenericPlayer::onPrepare(); 359 SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags); 360} 361 362 363void AudioSfDecoder::onPause() { 364 SL_LOGV("AudioSfDecoder::onPause()"); 365 GenericPlayer::onPause(); 366 pauseAudioSink(); 367} 368 369 370void AudioSfDecoder::onPlay() { 371 SL_LOGV("AudioSfDecoder::onPlay()"); 372 GenericPlayer::onPlay(); 373 startAudioSink(); 374} 375 376 377void AudioSfDecoder::onSeek(const sp<AMessage> &msg) { 378 SL_LOGV("AudioSfDecoder::onSeek"); 379 int64_t timeMsec; 380 CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec)); 381 382 Mutex::Autolock _l(mTimeLock); 383 mStateFlags |= kFlagSeeking; 384 mSeekTimeMsec = timeMsec; 385 // don't set mLastDecodedPositionUs to ANDROID_UNKNOWN_TIME; getPositionUsec 386 // ignores 
mLastDecodedPositionUs while seeking, and substitutes the seek goal instead 387 388 // nop for now 389 GenericPlayer::onSeek(msg); 390} 391 392 393void AudioSfDecoder::onLoop(const sp<AMessage> &msg) { 394 SL_LOGV("AudioSfDecoder::onLoop"); 395 int32_t loop; 396 CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop)); 397 398 if (loop) { 399 //SL_LOGV("AudioSfDecoder::onLoop start looping"); 400 mStateFlags |= kFlagLooping; 401 } else { 402 //SL_LOGV("AudioSfDecoder::onLoop stop looping"); 403 mStateFlags &= ~kFlagLooping; 404 } 405 406 // nop for now 407 GenericPlayer::onLoop(msg); 408} 409 410 411void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) { 412 //SL_LOGV("AudioSfDecoder::onCheckCache"); 413 bool eos; 414 CacheStatus_t status = getCacheRemaining(&eos); 415 416 if (eos || status == kStatusHigh 417 || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) { 418 if (mStateFlags & kFlagPlaying) { 419 startAudioSink(); 420 } 421 mStateFlags &= ~kFlagBuffering; 422 423 SL_LOGV("AudioSfDecoder::onCheckCache: buffering done."); 424 425 if (mStateFlags & kFlagPreparing) { 426 //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done."); 427 mStateFlags &= ~kFlagPreparing; 428 } 429 430 if (mStateFlags & kFlagPlaying) { 431 (new AMessage(kWhatDecode, id()))->post(); 432 } 433 return; 434 } 435 436 msg->post(100000); 437} 438 439 440void AudioSfDecoder::onDecode() { 441 SL_LOGV("AudioSfDecoder::onDecode"); 442 443 //-------------------------------- Need to buffer some more before decoding? 
444 bool eos; 445 if (mDataSource == 0) { 446 // application set play state to paused which failed, then set play state to playing 447 return; 448 } 449 450 if (wantPrefetch() 451 && (getCacheRemaining(&eos) == kStatusLow) 452 && !eos) { 453 SL_LOGV("buffering more."); 454 455 if (mStateFlags & kFlagPlaying) { 456 pauseAudioSink(); 457 } 458 mStateFlags |= kFlagBuffering; 459 (new AMessage(kWhatCheckCache, id()))->post(100000); 460 return; 461 } 462 463 if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) { 464 // don't decode if we're not buffering, prefetching or playing 465 //SL_LOGV("don't decode: not buffering, prefetching or playing"); 466 return; 467 } 468 469 //-------------------------------- Decode 470 status_t err; 471 MediaSource::ReadOptions readOptions; 472 if (mStateFlags & kFlagSeeking) { 473 assert(mSeekTimeMsec != ANDROID_UNKNOWN_TIME); 474 readOptions.setSeekTo(mSeekTimeMsec * 1000); 475 } 476 477 int64_t timeUsec = ANDROID_UNKNOWN_TIME; 478 { 479 Mutex::Autolock _l(mBufferSourceLock); 480 481 if (NULL != mDecodeBuffer) { 482 // the current decoded buffer hasn't been rendered, drop it 483 mDecodeBuffer->release(); 484 mDecodeBuffer = NULL; 485 } 486 if(!mAudioSourceStarted) { 487 return; 488 } 489 err = mAudioSource->read(&mDecodeBuffer, &readOptions); 490 if (err == OK) { 491 // FIXME workaround apparent bug in AAC decoder: kKeyTime is 3 frames old if length is 0 492 if (mDecodeBuffer->range_length() == 0) { 493 timeUsec = ANDROID_UNKNOWN_TIME; 494 } else { 495 CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &timeUsec)); 496 } 497 } 498 } 499 500 { 501 Mutex::Autolock _l(mTimeLock); 502 if (mStateFlags & kFlagSeeking) { 503 mStateFlags &= ~kFlagSeeking; 504 mSeekTimeMsec = ANDROID_UNKNOWN_TIME; 505 } 506 if (timeUsec != ANDROID_UNKNOWN_TIME) { 507 // Note that though we've decoded this position, we haven't rendered it yet. 
508 // So a GetPosition called after this point will observe the advanced position, 509 // even though the PCM may not have been supplied to the sink. That's OK as 510 // we don't claim to provide frame-accurate (let alone sample-accurate) GetPosition. 511 mLastDecodedPositionUs = timeUsec; 512 } 513 } 514 515 //-------------------------------- Handle return of decode 516 if (err != OK) { 517 bool continueDecoding = false; 518 switch(err) { 519 case ERROR_END_OF_STREAM: 520 if (0 < mDurationUsec) { 521 Mutex::Autolock _l(mTimeLock); 522 mLastDecodedPositionUs = mDurationUsec; 523 } 524 // handle notification and looping at end of stream 525 if (mStateFlags & kFlagPlaying) { 526 notify(PLAYEREVENT_ENDOFSTREAM, 1, true); 527 } 528 if (mStateFlags & kFlagLooping) { 529 seek(0); 530 // kick-off decoding again 531 continueDecoding = true; 532 } 533 break; 534 case INFO_FORMAT_CHANGED: 535 SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED"); 536 // reconfigure output 537 { 538 Mutex::Autolock _l(mBufferSourceLock); 539 hasNewDecodeParams(); 540 } 541 continueDecoding = true; 542 break; 543 case INFO_DISCONTINUITY: 544 SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY"); 545 continueDecoding = true; 546 break; 547 default: 548 SL_LOGE("MediaSource::read returned error %d", err); 549 break; 550 } 551 if (continueDecoding) { 552 if (NULL == mDecodeBuffer) { 553 (new AMessage(kWhatDecode, id()))->post(); 554 return; 555 } 556 } else { 557 return; 558 } 559 } 560 561 //-------------------------------- Render 562 sp<AMessage> msg = new AMessage(kWhatRender, id()); 563 msg->post(); 564 565} 566 567 568void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) { 569 switch (msg->what()) { 570 case kWhatDecode: 571 onDecode(); 572 break; 573 574 case kWhatRender: 575 onRender(); 576 break; 577 578 case kWhatCheckCache: 579 onCheckCache(msg); 580 break; 581 582 default: 583 GenericPlayer::onMessageReceived(msg); 584 break; 585 } 586} 587 
588//-------------------------------------------------- 589// Prepared state, prefetch status notifications 590void AudioSfDecoder::notifyPrepared(status_t prepareRes) { 591 assert(!(mStateFlags & (kFlagPrepared | kFlagPreparedUnsuccessfully))); 592 if (NO_ERROR == prepareRes) { 593 // The "then" fork is not currently used, but is kept here to make it easier 594 // to replace by a new signalPrepareCompletion(status) if we re-visit this later. 595 mStateFlags |= kFlagPrepared; 596 } else { 597 mStateFlags |= kFlagPreparedUnsuccessfully; 598 } 599 // Do not call the superclass onPrepare to notify, because it uses a default error 600 // status code but we can provide a more specific one. 601 // GenericPlayer::onPrepare(); 602 notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true); 603 SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags); 604} 605 606 607void AudioSfDecoder::onNotify(const sp<AMessage> &msg) { 608 notif_cbf_t notifyClient; 609 void* notifyUser; 610 { 611 android::Mutex::Autolock autoLock(mNotifyClientLock); 612 if (NULL == mNotifyClient) { 613 return; 614 } else { 615 notifyClient = mNotifyClient; 616 notifyUser = mNotifyUser; 617 } 618 } 619 int32_t val; 620 if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) { 621 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val); 622 notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser); 623 } 624 else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) { 625 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val); 626 notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser); 627 } 628 else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) { 629 SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val); 630 notifyClient(kEventEndOfStream, val, 0, notifyUser); 631 } 632 else { 633 GenericPlayer::onNotify(msg); 634 } 635} 636 637 638//-------------------------------------------------- 639// 
Private utility functions 640 641bool AudioSfDecoder::wantPrefetch() { 642 if (mDataSource != 0) { 643 return (mDataSource->flags() & DataSource::kWantsPrefetching); 644 } else { 645 // happens if an improper data locator was passed, if the media extractor couldn't be 646 // initialized, if there is no audio track in the media, if the OMX decoder couldn't be 647 // instantiated, if the source couldn't be opened, or if the MediaSource 648 // couldn't be started 649 SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource"); 650 return false; 651 } 652} 653 654 655int64_t AudioSfDecoder::getPositionUsec() { 656 Mutex::Autolock _l(mTimeLock); 657 if (mStateFlags & kFlagSeeking) { 658 return mSeekTimeMsec * 1000; 659 } else { 660 return mLastDecodedPositionUs; 661 } 662} 663 664 665CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) { 666 sp<NuCachedSource2> cachedSource = 667 static_cast<NuCachedSource2 *>(mDataSource.get()); 668 669 CacheStatus_t oldStatus = mCacheStatus; 670 671 status_t finalStatus; 672 size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus); 673 *eos = (finalStatus != OK); 674 675 CHECK_GE(mBitrate, 0); 676 677 int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate; 678 //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)", 679 // dataRemainingUs / 1E6, *eos); 680 681 if (*eos) { 682 // data is buffered up to the end of the stream, it can't get any better than this 683 mCacheStatus = kStatusHigh; 684 mCacheFill = 1000; 685 686 } else { 687 if (mDurationUsec > 0) { 688 // known duration: 689 690 // fill level is ratio of how much has been played + how much is 691 // cached, divided by total duration 692 uint32_t currentPositionUsec = getPositionUsec(); 693 if (currentPositionUsec == ANDROID_UNKNOWN_TIME) { 694 // if we don't know where we are, assume the worst for the fill ratio 695 currentPositionUsec = 0; 696 } 697 if (mDurationUsec > 0) { 698 mCacheFill = (int16_t) 
((1000.0 699 * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec)); 700 } else { 701 mCacheFill = 0; 702 } 703 //SL_LOGV("cacheFill = %d", mCacheFill); 704 705 // cache status is evaluated against duration thresholds 706 if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) { 707 mCacheStatus = kStatusHigh; 708 //LOGV("high"); 709 } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) { 710 //LOGV("enough"); 711 mCacheStatus = kStatusEnough; 712 } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) { 713 //LOGV("low"); 714 mCacheStatus = kStatusLow; 715 } else { 716 mCacheStatus = kStatusIntermediate; 717 } 718 719 } else { 720 // unknown duration: 721 722 // cache status is evaluated against cache amount thresholds 723 // (no duration so we don't have the bitrate either, could be derived from format?) 724 if (dataRemaining > SIZE_CACHED_HIGH_BYTES) { 725 mCacheStatus = kStatusHigh; 726 } else if (dataRemaining > SIZE_CACHED_MED_BYTES) { 727 mCacheStatus = kStatusEnough; 728 } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) { 729 mCacheStatus = kStatusLow; 730 } else { 731 mCacheStatus = kStatusIntermediate; 732 } 733 } 734 735 } 736 737 if (oldStatus != mCacheStatus) { 738 notifyStatus(); 739 } 740 741 if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) { 742 notifyCacheFill(); 743 } 744 745 return mCacheStatus; 746} 747 748 749void AudioSfDecoder::hasNewDecodeParams() { 750 751 if ((mAudioSource != 0) && mAudioSourceStarted) { 752 sp<MetaData> meta = mAudioSource->getFormat(); 753 754 int32_t channelCount; 755 CHECK(meta->findInt32(kKeyChannelCount, &channelCount)); 756 int32_t sr; 757 CHECK(meta->findInt32(kKeySampleRate, &sr)); 758 759 // FIXME similar to onPrepare() 760 { 761 android::Mutex::Autolock autoLock(mPcmFormatLock); 762 SL_LOGV("format changed: old sr=%d, channels=%d; new sr=%d, channels=%d", 763 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE], 764 
mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS], 765 sr, channelCount); 766 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount; 767 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr; 768 mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = 769 channelCountToMask(channelCount); 770 } 771 } 772 773 // alert users of those params 774 updateAudioSink(); 775} 776 777} // namespace android 778