android_AudioSfDecoder.cpp revision bb832e853d4afb11b0a3287b2eb0cad87696d631
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Verbose

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"

#include <media/stagefright/foundation/ADebug.h>


// Byte-count thresholds used by getCacheRemaining() to classify the cache fill
// level when the stream duration (and therefore the bitrate) is unknown.
#define SIZE_CACHED_HIGH_BYTES 1000000
#define SIZE_CACHED_MED_BYTES   700000
#define SIZE_CACHED_LOW_BYTES   400000

namespace android {

//--------------------------------------------------------------------------------------------------
// Constructor: initializes all members to "unknown / not yet prepared" values.
// The heavy lifting (data source, extractor, decoder setup) happens later in onPrepare(),
// on the GenericPlayer event loop.
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mDataSource(0),
        mAudioSource(0),
        mAudioSourceStarted(false),
        mBitrate(-1),                                  // -1 == bitrate unknown
        mChannelMask(UNKNOWN_CHANNELMASK),
        mDurationUsec(ANDROID_UNKNOWN_TIME),
        mDecodeBuffer(NULL),
        mSeekTimeMsec(0),
        mLastDecodedPositionUs(ANDROID_UNKNOWN_TIME),
        mPcmFormatKeyCount(0)                          // no PCM metadata until onPrepare()
{
    SL_LOGD("AudioSfDecoder::AudioSfDecoder()");
}


AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGD("AudioSfDecoder::~AudioSfDecoder()");
}


// Tears down the decode pipeline before destruction: drops any decoded buffer that was
// never rendered, then stops the audio source. Both operations are done under
// mBufferSourceLock so they cannot race with onDecode()/onRender() on the event loop.
void AudioSfDecoder::preDestroy() {
    GenericPlayer::preDestroy();
    SL_LOGD("AudioSfDecoder::preDestroy()");
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            // release the MediaBuffer back to the source before stopping it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }

        if ((mAudioSource != 0) && mAudioSourceStarted) {
            mAudioSource->stop();
            mAudioSourceStarted = false;
        }
    }
}
70 71 72//-------------------------------------------------- 73void AudioSfDecoder::play() { 74 SL_LOGD("AudioSfDecoder::play"); 75 76 GenericPlayer::play(); 77 (new AMessage(kWhatDecode, id()))->post(); 78} 79 80 81void AudioSfDecoder::getPositionMsec(int* msec) { 82 int64_t timeUsec = getPositionUsec(); 83 if (timeUsec == ANDROID_UNKNOWN_TIME) { 84 *msec = ANDROID_UNKNOWN_TIME; 85 } else { 86 *msec = timeUsec / 1000; 87 } 88} 89 90 91void AudioSfDecoder::startPrefetch_async() { 92 SL_LOGV("AudioSfDecoder::startPrefetch_async()"); 93 94 if (wantPrefetch()) { 95 SL_LOGV("AudioSfDecoder::startPrefetch_async(): sending check cache msg"); 96 97 mStateFlags |= kFlagPreparing | kFlagBuffering; 98 99 (new AMessage(kWhatCheckCache, id()))->post(); 100 } 101} 102 103 104//-------------------------------------------------- 105uint32_t AudioSfDecoder::getPcmFormatKeyCount() { 106 android::Mutex::Autolock autoLock(mPcmFormatLock); 107 return mPcmFormatKeyCount; 108} 109 110 111//-------------------------------------------------- 112bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) { 113 uint32_t keyCount = getPcmFormatKeyCount(); 114 if (index >= keyCount) { 115 return false; 116 } else { 117 *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) +1; 118 return true; 119 } 120} 121 122 123//-------------------------------------------------- 124bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) { 125 uint32_t actualKeySize; 126 if (!getPcmFormatKeySize(index, &actualKeySize)) { 127 return false; 128 } 129 if (keySize < actualKeySize) { 130 return false; 131 } 132 strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize); 133 return true; 134} 135 136 137//-------------------------------------------------- 138bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) { 139 uint32_t keyCount = getPcmFormatKeyCount(); 140 if (index >= keyCount) { 141 *pValueSize = 0; 142 return false; 143 } 
else { 144 *pValueSize = sizeof(uint32_t); 145 return true; 146 } 147} 148 149 150//-------------------------------------------------- 151bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) { 152 uint32_t valueSize = 0; 153 if (!getPcmFormatValueSize(index, &valueSize)) { 154 return false; 155 } else if (size != valueSize) { 156 // this ensures we are accessing mPcmFormatValues with a valid size for that index 157 SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d", 158 index, size, valueSize); 159 return false; 160 } else { 161 *pValue = mPcmFormatValues[index]; 162 return true; 163 } 164} 165 166 167//-------------------------------------------------- 168// Event handlers 169// it is strictly verboten to call those methods outside of the event loop 170 171// Initializes the data and audio sources, and update the PCM format info 172// post-condition: upon successful initialization based on the player data locator 173// GenericPlayer::onPrepare() was called 174// mDataSource != 0 175// mAudioSource != 0 176// mAudioSourceStarted == true 177// All error returns from this method are via notifyPrepared(status) followed by "return". 
void AudioSfDecoder::onPrepare() {
    SL_LOGD("AudioSfDecoder::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    // Initialize the PCM format info with the known parameters before the start of the decode
    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
    // initialization with the default values: they will be replaced by the actual values
    // once the decoder has figured them out
    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz;
    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;

    //---------------------------------
    // Instantiate and initialize the data source for the decoder
    sp<DataSource> dataSource;

    switch (mDataLocatorType) {

    case kDataLocatorNone:
        SL_LOGE("AudioSfDecoder::onPrepare: no data locator set");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;

    case kDataLocatorUri:
        dataSource = DataSource::CreateFromURI(mDataLocator.uriRef);
        if (dataSource == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef);
            notifyPrepared(MEDIA_ERROR_BASE);
            return;
        }
        break;

    case kDataLocatorFd:
    {
        dataSource = new FileSource(
                mDataLocator.fdi.fd, mDataLocator.fdi.offset, mDataLocator.fdi.length);
        status_t err = dataSource->initCheck();
        if (err != OK) {
            notifyPrepared(err);
            return;
        }
        break;
    }

    default:
        // unreachable: the locator type was validated when it was set
        TRESPASS();
    }

    //---------------------------------
    // Instanciate and initialize the decoder attached to the data source
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    if (extractor == NULL) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    // pick the first "audio/..." track; note whether it is already raw PCM
    // (in which case no OMX decoder is needed)
    ssize_t audioTrackIndex = -1;
    bool isRawAudio = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("audio/", mime, 6)) {
            audioTrackIndex = i;

            if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
                isRawAudio = true;
            }
            break;
        }
    }

    if (audioTrackIndex < 0) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MediaSource> source = extractor->getTrack(audioTrackIndex);
    sp<MetaData> meta = source->getFormat();

    // we can't trust the OMXCodec (if there is one) to issue a INFO_FORMAT_CHANGED so we want
    // to have some meaningful values as soon as possible.
    bool hasChannelCount = meta->findInt32(kKeyChannelCount, &mChannelCount);
    int32_t sr;
    bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr);
    if (hasSampleRate) {
        mSampleRateHz = (uint32_t) sr;
    }

    // derive the average bitrate from container size and duration, when both are known;
    // getCacheRemaining() relies on mBitrate >= 0 to convert bytes to time
    off64_t size;
    int64_t durationUs;
    if (dataSource->getSize(&size) == OK
            && meta->findInt64(kKeyDuration, &durationUs)) {
        if (durationUs != 0) {
            mBitrate = size * 8000000ll / durationUs; // in bits/sec
        } else {
            mBitrate = -1;
        }
        mDurationUsec = durationUs;
        mDurationMsec = durationUs / 1000;
    } else {
        mBitrate = -1;
        mDurationUsec = ANDROID_UNKNOWN_TIME;
        mDurationMsec = ANDROID_UNKNOWN_TIME;
    }

    // the audio content is not raw PCM, so we need a decoder
    if (!isRawAudio) {
        OMXClient client;
        CHECK_EQ(client.connect(), (status_t)OK);

        source = OMXCodec::Create(
                client.interface(), meta, false /* createEncoder */,
                source);

        if (source == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder.");
            notifyPrepared(ERROR_UNSUPPORTED);
            return;
        }

        // from here on, meta describes the decoder's (PCM) output format
        meta = source->getFormat();
    }


    if (source->start() != OK) {
        SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    // The data source, and audio source (a decoder if required) are ready to be used
    mDataSource = dataSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    // if the container metadata didn't carry these, the (now started) decoder must
    if (!hasChannelCount) {
        CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
    }

    if (!hasSampleRate) {
        CHECK(meta->findInt32(kKeySampleRate, &sr));
        mSampleRateHz = (uint32_t) sr;
    }
    // FIXME add code below once channel mask support is in, currently initialized to default
    //    if (meta->findInt32(kKeyChannelMask, &mChannelMask)) {
    //        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
    //    }

    if (!wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch");
        // doesn't need prefetching, notify good to go
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;
        notifyStatus();
        notifyCacheFill();
    }

    // publish the now-known PCM format; metadata getters read these under the same lock
    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatKeyCount = NB_PCMMETADATA_KEYS;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
    }

    // at this point we have enough information about the source to create the sink that
    // will consume the data
    createAudioSink();

    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    GenericPlayer::onPrepare();
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


void AudioSfDecoder::onPause() {
    SL_LOGV("AudioSfDecoder::onPause()");
    GenericPlayer::onPause();
    pauseAudioSink();
}


void AudioSfDecoder::onPlay() {
    SL_LOGV("AudioSfDecoder::onPlay()");
    GenericPlayer::onPlay();
    startAudioSink();
}


// Records the seek request; the actual seek is performed by the next onDecode(),
// which translates kFlagSeeking into a MediaSource::ReadOptions seek.
void AudioSfDecoder::onSeek(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onSeek");
    int64_t timeMsec;
    CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec));

    Mutex::Autolock _l(mTimeLock);
    mStateFlags |= kFlagSeeking;
    mSeekTimeMsec = timeMsec;
    // position is unknown until the seeked-to buffer is decoded
    mLastDecodedPositionUs = ANDROID_UNKNOWN_TIME;
}


void AudioSfDecoder::onLoop(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onLoop");
    int32_t loop;
    CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop));

    if (loop) {
        //SL_LOGV("AudioSfDecoder::onLoop start looping");
        mStateFlags |= kFlagLooping;
    } else {
        //SL_LOGV("AudioSfDecoder::onLoop stop looping");
        mStateFlags &= ~kFlagLooping;
    }
}


// Polls the cache level; when enough data is buffered (or EOS was reached) ends
// the buffering/preparing phase and resumes playback, otherwise re-posts itself
// in 100 ms to check again.
void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) {
    //SL_LOGV("AudioSfDecoder::onCheckCache");
    bool eos;
    CacheStatus_t status = getCacheRemaining(&eos);

    // while preparing, kStatusEnough is a good enough level to start; otherwise wait for high
    if (eos || status == kStatusHigh
            || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) {
        if (mStateFlags & kFlagPlaying) {
            startAudioSink();
        }
        mStateFlags &= ~kFlagBuffering;

        SL_LOGV("AudioSfDecoder::onCheckCache: buffering done.");

        if (mStateFlags & kFlagPreparing) {
            //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done.");
            mStateFlags &= ~kFlagPreparing;
        }

        if (mStateFlags & kFlagPlaying) {
            (new AMessage(kWhatDecode, id()))->post();
        }
        return;
    }

    // not enough data yet: poll again in 100 ms
    msg->post(100000);
}


// One iteration of the decode loop: optionally waits for more cached data, reads
// (and thereby decodes) one buffer from the audio source, updates the playback
// position, handles read errors / format changes / EOS, then posts kWhatRender.
void AudioSfDecoder::onDecode() {
    SL_LOGV("AudioSfDecoder::onDecode");

    //-------------------------------- Need to buffer some more before decoding?
    bool eos;
    if (mDataSource == 0) {
        // application set play state to paused which failed, then set play state to playing
        return;
    }

    if (wantPrefetch()
            && (getCacheRemaining(&eos) == kStatusLow)
            && !eos) {
        SL_LOGV("buffering more.");

        // cache ran low: pause output, go back to buffering, and let onCheckCache
        // restart the decode loop once the cache has refilled
        if (mStateFlags & kFlagPlaying) {
            pauseAudioSink();
        }
        mStateFlags |= kFlagBuffering;
        (new AMessage(kWhatCheckCache, id()))->post(100000);
        return;
    }

    if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) {
        // don't decode if we're not buffering, prefetching or playing
        //SL_LOGV("don't decode: not buffering, prefetching or playing");
        return;
    }

    //-------------------------------- Decode
    status_t err;
    MediaSource::ReadOptions readOptions;
    if (mStateFlags & kFlagSeeking) {
        assert(mSeekTimeMsec != ANDROID_UNKNOWN_TIME);
        readOptions.setSeekTo(mSeekTimeMsec * 1000);
    }

    int64_t timeUsec = ANDROID_UNKNOWN_TIME;
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            // the current decoded buffer hasn't been rendered, drop it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }
        if(!mAudioSourceStarted) {
            // preDestroy() stopped the source under this same lock; nothing more to do
            return;
        }
        err = mAudioSource->read(&mDecodeBuffer, &readOptions);
        if (err == OK) {
            CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &timeUsec));
        }
    }

    {
        Mutex::Autolock _l(mTimeLock);
        if (mStateFlags & kFlagSeeking) {
            // the read above consumed the pending seek
            mStateFlags &= ~kFlagSeeking;
            mSeekTimeMsec = ANDROID_UNKNOWN_TIME;
        }
        if (timeUsec != ANDROID_UNKNOWN_TIME) {
            mLastDecodedPositionUs = timeUsec;
        }
    }

    //-------------------------------- Handle return of decode
    if (err != OK) {
        bool continueDecoding = false;
        switch(err) {
        case ERROR_END_OF_STREAM:
            // clamp the reported position to the known duration
            if (0 < mDurationUsec) {
                Mutex::Autolock _l(mTimeLock);
                mLastDecodedPositionUs = mDurationUsec;
            }
            // handle notification and looping at end of stream
            if (mStateFlags & kFlagPlaying) {
                notify(PLAYEREVENT_ENDOFSTREAM, 1, true);
            }
            if (mStateFlags & kFlagLooping) {
                seek(0);
                // kick-off decoding again
                continueDecoding = true;
            }
            break;
        case INFO_FORMAT_CHANGED:
            SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED");
            // reconfigure output
            {
                Mutex::Autolock _l(mBufferSourceLock);
                hasNewDecodeParams();
            }
            continueDecoding = true;
            break;
        case INFO_DISCONTINUITY:
            SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY");
            continueDecoding = true;
            break;
        default:
            SL_LOGE("MediaSource::read returned error %d", err);
            break;
        }
        if (continueDecoding) {
            // only re-post the decode if the read produced no buffer; if it did,
            // fall through so the buffer below gets rendered first
            if (NULL == mDecodeBuffer) {
                (new AMessage(kWhatDecode, id()))->post();
                return;
            }
        } else {
            return;
        }
    }

    //-------------------------------- Render
    sp<AMessage> msg = new AMessage(kWhatRender, id());
    msg->post();
}


// Consumes the buffer produced by onDecode(). In this decode-to-PCM player the
// "render" step as visible here just releases the buffer.
void AudioSfDecoder::onRender() {
    //SL_LOGV("AudioSfDecoder::onRender");

    Mutex::Autolock _l(mBufferSourceLock);

    if (NULL == mDecodeBuffer) {
        // nothing to render, move along
        SL_LOGV("AudioSfDecoder::onRender NULL buffer, exiting");
        return;
    }

    mDecodeBuffer->release();
    mDecodeBuffer = NULL;

}


// Event-loop dispatch: routes each message to its handler; unknown messages
// (including the commented-out seek/loop cases) go to the superclass.
void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatPrepare:
            onPrepare();
            break;

        case kWhatDecode:
            onDecode();
            break;

        case kWhatRender:
            onRender();
            break;

        case kWhatCheckCache:
            onCheckCache(msg);
            break;

        case kWhatNotif:
            onNotify(msg);
            break;

        case kWhatPlay:
            onPlay();
            break;

        case kWhatPause:
            onPause();
            break;

/*
        case kWhatSeek:
            onSeek(msg);
            break;

        case kWhatLoop:
            onLoop(msg);
            break;
*/
        default:
            GenericPlayer::onMessageReceived(msg);
            break;
    }
}

//--------------------------------------------------
// Prepared state, prefetch status notifications

// Marks the player prepared (successfully or not) and notifies the client with
// the specific status code, bypassing the superclass's generic notification.
void AudioSfDecoder::notifyPrepared(status_t prepareRes) {
    // must not already be in either prepared state
    assert(!(mStateFlags & (kFlagPrepared | kFlagPreparedUnsuccessfully)));
    if (NO_ERROR == prepareRes) {
        // The "then" fork is not currently used, but is kept here to make it easier
        // to replace by a new signalPrepareCompletion(status) if we re-visit this later.
        mStateFlags |= kFlagPrepared;
    } else {
        mStateFlags |= kFlagPreparedUnsuccessfully;
    }
    // Do not call the superclass onPrepare to notify, because it uses a default error
    // status code but we can provide a more specific one.
    // GenericPlayer::onPrepare();
    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true);
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


// Forwards a notification message to the registered client callback (if any);
// the callback pointer is snapshotted under mNotifyClientLock, then invoked
// without the lock held.
void AudioSfDecoder::onNotify(const sp<AMessage> &msg) {
    notif_cbf_t notifyClient;
    void* notifyUser;
    {
        android::Mutex::Autolock autoLock(mNotifyClientLock);
        if (NULL == mNotifyClient) {
            return;
        } else {
            notifyClient = mNotifyClient;
            notifyUser = mNotifyUser;
        }
    }
    int32_t val;
    if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val);
        notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val);
        notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val);
        notifyClient(kEventEndOfStream, val, 0, notifyUser);
    }
    else {
        GenericPlayer::onNotify(msg);
    }
}


//--------------------------------------------------
// Private utility functions

// Returns whether the data source benefits from prefetching (e.g. network streams).
bool AudioSfDecoder::wantPrefetch() {
    if (mDataSource != 0) {
        return (mDataSource->flags() & DataSource::kWantsPrefetching);
    } else {
        // happens if an improper data locator was passed, if the media extractor couldn't be
        // initialized, if there is no audio track in the media, if the OMX decoder couldn't be
        // instantiated, if the source couldn't be opened, or if the MediaSource
        // couldn't be started
        SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource");
        return false;
    }
}


// Returns the current playback position in microseconds: the pending seek target
// while a seek is in flight, otherwise the timestamp of the last decoded buffer,
// or ANDROID_UNKNOWN_TIME if nothing has been decoded yet.
int64_t AudioSfDecoder::getPositionUsec() {
    Mutex::Autolock _l(mTimeLock);
    if (mStateFlags & kFlagSeeking) {
        return mSeekTimeMsec * 1000;
    } else {
        if (mLastDecodedPositionUs < 0) {
            return ANDROID_UNKNOWN_TIME;
        } else {
            return mLastDecodedPositionUs;
        }
    }
}


// Evaluates how much data is cached ahead of the playback position, updates
// mCacheStatus / mCacheFill, and fires status / fill-level notifications when
// they change. Sets *eos when the whole stream is cached.
// NOTE(review): this downcasts mDataSource to NuCachedSource2 unconditionally;
// presumably only called when wantPrefetch() is true — confirm with callers.
CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) {
    sp<NuCachedSource2> cachedSource =
            static_cast<NuCachedSource2 *>(mDataSource.get());

    CacheStatus_t oldStatus = mCacheStatus;

    status_t finalStatus;
    size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus);
    *eos = (finalStatus != OK);

    // NOTE(review): aborts if onPrepare() couldn't compute a bitrate (mBitrate == -1)
    CHECK_GE(mBitrate, 0);

    int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate;
    //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)",
    //       dataRemainingUs / 1E6, *eos);

    if (*eos) {
        // data is buffered up to the end of the stream, it can't get any better than this
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;

    } else {
        if (mDurationUsec > 0) {
            // known duration:

            // fill level is ratio of how much has been played + how much is
            // cached, divided by total duration
            // NOTE(review): getPositionUsec() returns int64_t; storing it in a uint32_t
            // truncates positions beyond ~71 min, and the ANDROID_UNKNOWN_TIME (-1)
            // comparison below can never match after the unsigned conversion — the
            // "unknown position" fallback looks dead. Should be int64_t; verify.
            uint32_t currentPositionUsec = getPositionUsec();
            if (currentPositionUsec == ANDROID_UNKNOWN_TIME) {
                // if we don't know where we are, assume the worst for the fill ratio
                currentPositionUsec = 0;
            }
            if (mDurationUsec > 0) {
                mCacheFill = (int16_t) ((1000.0
                        * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec));
            } else {
                mCacheFill = 0;
            }
            //SL_LOGV("cacheFill = %d", mCacheFill);

            // cache status is evaluated against duration thresholds
            if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) {
                mCacheStatus = kStatusHigh;
                //LOGV("high");
            } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) {
                //LOGV("enough");
                mCacheStatus = kStatusEnough;
            } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) {
                //LOGV("low");
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }

        } else {
            // unknown duration:

            // cache status is evaluated against cache amount thresholds
            // (no duration so we don't have the bitrate either, could be derived from format?)
            if (dataRemaining > SIZE_CACHED_HIGH_BYTES) {
                mCacheStatus = kStatusHigh;
            } else if (dataRemaining > SIZE_CACHED_MED_BYTES) {
                mCacheStatus = kStatusEnough;
            } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) {
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }
        }

    }

    if (oldStatus != mCacheStatus) {
        notifyStatus();
    }

    // only notify the fill level when it moved by more than the configured threshold
    if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) {
        notifyCacheFill();
    }

    return mCacheStatus;
}


// Called (under mBufferSourceLock) after INFO_FORMAT_CHANGED: re-reads sample
// rate and channel count from the decoder's output format, republishes the PCM
// metadata under mPcmFormatLock, and reconfigures the audio sink.
void AudioSfDecoder::hasNewDecodeParams() {

    if ((mAudioSource != 0) && mAudioSourceStarted) {
        sp<MetaData> meta = mAudioSource->getFormat();

        SL_LOGV("old sample rate = %d, channel count = %d", mSampleRateHz, mChannelCount);

        CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
        int32_t sr;
        CHECK(meta->findInt32(kKeySampleRate, &sr));
        mSampleRateHz = (uint32_t) sr;
        SL_LOGV("format changed: new sample rate = %d, channel count = %d",
                mSampleRateHz, mChannelCount);

        {
            android::Mutex::Autolock autoLock(mPcmFormatLock);
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLESPERSEC] = mSampleRateHz;
        }
    }

    // alert users of those params
    updateAudioSink();
}

} // namespace android