android_AudioSfDecoder.cpp revision 0f92f48017588949daf7d24a339423e149bb2555
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Verbose

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"

#include <media/stagefright/foundation/ADebug.h>


#define SIZE_CACHED_HIGH_BYTES 1000000
#define SIZE_CACHED_MED_BYTES   700000
#define SIZE_CACHED_LOW_BYTES   400000

namespace android {

//--------------------------------------------------------------------------------------------------
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mDataSource(0),
        mAudioSource(0),
        mAudioSourceStarted(false),
        mBitrate(-1),
        mChannelMask(UNKNOWN_CHANNELMASK),
        mDurationUsec(ANDROID_UNKNOWN_TIME),
        mDecodeBuffer(NULL),
        mSeekTimeMsec(0),
        mLastDecodedPositionUs(ANDROID_UNKNOWN_TIME)
{
    SL_LOGD("AudioSfDecoder::AudioSfDecoder()");
}


AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGD("AudioSfDecoder::~AudioSfDecoder()");
}


void AudioSfDecoder::preDestroy() {
    GenericPlayer::preDestroy();
    SL_LOGD("AudioSfDecoder::preDestroy()");
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }

        if ((mAudioSource != 0) && mAudioSourceStarted) {
            mAudioSource->stop();
            mAudioSourceStarted = false;
        }
    }
}


//--------------------------------------------------
void AudioSfDecoder::play() {
    SL_LOGD("AudioSfDecoder::play");

    GenericPlayer::play();
    (new AMessage(kWhatDecode, id()))->post();
}


void AudioSfDecoder::getPositionMsec(int* msec) {
    int64_t timeUsec = getPositionUsec();
    if (timeUsec == ANDROID_UNKNOWN_TIME) {
        *msec = ANDROID_UNKNOWN_TIME;
    } else {
        *msec = timeUsec / 1000;
    }
}


void AudioSfDecoder::startPrefetch_async() {
    SL_LOGV("AudioSfDecoder::startPrefetch_async()");

    if (wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::startPrefetch_async(): sending check cache msg");

        mStateFlags |= kFlagPreparing | kFlagBuffering;

        (new AMessage(kWhatCheckCache, id()))->post();
    }
}


//--------------------------------------------------
uint32_t AudioSfDecoder::getPcmFormatKeyCount() const {
    return NB_PCMMETADATA_KEYS;
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        return false;
    } else {
        *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) + 1;
        return true;
    }
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) {
    uint32_t actualKeySize;
    if (!getPcmFormatKeySize(index, &actualKeySize)) {
        return false;
    }
    if (keySize < actualKeySize) {
        return false;
    }
    strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize);
    return true;
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        *pValueSize = 0;
        return false;
    } else {
        *pValueSize = sizeof(uint32_t);
        return true;
    }
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) {
    uint32_t valueSize = 0;
    if (!getPcmFormatValueSize(index, &valueSize)) {
        return false;
    } else if (size != valueSize) {
        // this ensures we are accessing mPcmFormatValues with a valid size for that index
        SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d",
                index, size, valueSize);
        return false;
    } else {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        *pValue = mPcmFormatValues[index];
        return true;
    }
}


//--------------------------------------------------
// Event handlers
//   it is strictly verboten to call these methods outside of the event loop

// Initializes the data and audio sources, and updates the PCM format info
// post-condition: upon successful initialization based on the player data locator
//    GenericPlayer::onPrepare() was called
//    mDataSource != 0
//    mAudioSource != 0
//    mAudioSourceStarted == true
// All error returns from this method are via notifyPrepared(status) followed by "return".
void AudioSfDecoder::onPrepare() {
    SL_LOGD("AudioSfDecoder::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        // Initialize the PCM format info with the known parameters before the start of the decode
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
        // initialization with the default values: they will be replaced by the actual values
        //   once the decoder has figured them out
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = mSampleRateHz;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
    }

    //---------------------------------
    // Instantiate and initialize the data source for the decoder
    sp<DataSource> dataSource;

    switch (mDataLocatorType) {

        case kDataLocatorNone:
            SL_LOGE("AudioSfDecoder::onPrepare: no data locator set");
            notifyPrepared(MEDIA_ERROR_BASE);
            return;

        case kDataLocatorUri:
            dataSource = DataSource::CreateFromURI(mDataLocator.uriRef);
            if (dataSource == NULL) {
                SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef);
                notifyPrepared(MEDIA_ERROR_BASE);
                return;
            }
            break;

        case kDataLocatorFd:
        {
            // As FileSource unconditionally takes ownership of the fd and closes it, we have to
            //   dup the fd for FileSource if the app wants to keep ownership itself
            int fd = mDataLocator.fdi.fd;
            if (mDataLocator.fdi.mCloseAfterUse) {
                mDataLocator.fdi.mCloseAfterUse = false;
            } else {
                fd = ::dup(fd);
            }
            dataSource = new FileSource(fd, mDataLocator.fdi.offset, mDataLocator.fdi.length);
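            // Sanity-check that the FileSource initialized correctly (e.g. the descriptor it was
            //   given is actually usable); otherwise abort the prepare with the specific error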
            status_t err = dataSource->initCheck();
            if (err != OK) {
                notifyPrepared(err);
                return;
            }
            break;
        }

        default:
            TRESPASS();
    }

    //---------------------------------
    // Instantiate and initialize the decoder attached to the data source
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    if (extractor == NULL) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    ssize_t audioTrackIndex = -1;
    bool isRawAudio = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("audio/", mime, 6)) {
            audioTrackIndex = i;

            if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
                isRawAudio = true;
            }
            break;
        }
    }

    if (audioTrackIndex < 0) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MediaSource> source = extractor->getTrack(audioTrackIndex);
    sp<MetaData> meta = source->getFormat();

    // we can't trust the OMXCodec (if there is one) to issue an INFO_FORMAT_CHANGED, so we want
    //   to have some meaningful values as soon as possible.
    bool hasChannelCount = meta->findInt32(kKeyChannelCount, &mChannelCount);
    int32_t sr;
    bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr);
    if (hasSampleRate) {
        mSampleRateHz = (uint32_t) sr;
    }

    off64_t size;
    int64_t durationUs;
    if (dataSource->getSize(&size) == OK
            && meta->findInt64(kKeyDuration, &durationUs)) {
        if (durationUs != 0) {
            mBitrate = size * 8000000ll / durationUs;  // in bits/sec
        } else {
            mBitrate = -1;
        }
        mDurationUsec = durationUs;
        mDurationMsec = durationUs / 1000;
    } else {
        mBitrate = -1;
        mDurationUsec = ANDROID_UNKNOWN_TIME;
        mDurationMsec = ANDROID_UNKNOWN_TIME;
    }

    // the audio content is not raw PCM, so we need a decoder
    if (!isRawAudio) {
        OMXClient client;
        CHECK_EQ(client.connect(), (status_t)OK);

        source = OMXCodec::Create(
                client.interface(), meta, false /* createEncoder */,
                source);

        if (source == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder.");
            notifyPrepared(ERROR_UNSUPPORTED);
            return;
        }

        meta = source->getFormat();
    }


    if (source->start() != OK) {
        SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    // The data source, and audio source (a decoder if required) are ready to be used
    mDataSource = dataSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    if (!hasChannelCount) {
        CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
    }

    if (!hasSampleRate) {
        CHECK(meta->findInt32(kKeySampleRate, &sr));
        mSampleRateHz = (uint32_t) sr;
    }
    // FIXME add code below once channel mask support is in, currently initialized to default
    //       value computed from the channel count
    //    if (!hasChannelMask) {
    //        CHECK(meta->findInt32(kKeyChannelMask, &mChannelMask));
    //    }
    mChannelMask = channelCountToMask(mChannelCount);

    if (!wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch");
        // doesn't need prefetching, notify good to go
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;
        notifyStatus();
        notifyCacheFill();
    }

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = mSampleRateHz;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
    }

    // at this point we have enough information about the source to create the sink that
    //   will consume the data
    createAudioSink();

    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    GenericPlayer::onPrepare();
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


void AudioSfDecoder::onPause() {
    SL_LOGV("AudioSfDecoder::onPause()");
    GenericPlayer::onPause();
    pauseAudioSink();
}


void AudioSfDecoder::onPlay() {
    SL_LOGV("AudioSfDecoder::onPlay()");
    GenericPlayer::onPlay();
    startAudioSink();
}


void AudioSfDecoder::onSeek(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onSeek");
    int64_t timeMsec;
    CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec));

    Mutex::Autolock _l(mTimeLock);
    mStateFlags |= kFlagSeeking;
    mSeekTimeMsec = timeMsec;
    mLastDecodedPositionUs = ANDROID_UNKNOWN_TIME;
}


void AudioSfDecoder::onLoop(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onLoop");
    int32_t loop;
    CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop));

    if (loop) {
        //SL_LOGV("AudioSfDecoder::onLoop start looping");
        mStateFlags |= kFlagLooping;
    } else {
        //SL_LOGV("AudioSfDecoder::onLoop stop looping");
        mStateFlags &= ~kFlagLooping;
    }
}


void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) {
    //SL_LOGV("AudioSfDecoder::onCheckCache");
    bool eos;
    CacheStatus_t status = getCacheRemaining(&eos);

    if (eos || status == kStatusHigh
            || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) {
        if (mStateFlags & kFlagPlaying) {
            startAudioSink();
        }
        mStateFlags &= ~kFlagBuffering;

        SL_LOGV("AudioSfDecoder::onCheckCache: buffering done.");

        if (mStateFlags & kFlagPreparing) {
            //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done.");
            mStateFlags &= ~kFlagPreparing;
        }

        if (mStateFlags & kFlagPlaying) {
            (new AMessage(kWhatDecode, id()))->post();
        }
        return;
    }

    msg->post(100000);
}


void AudioSfDecoder::onDecode() {
    SL_LOGV("AudioSfDecoder::onDecode");

    //-------------------------------- Need to buffer some more before decoding?
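    // If the source wants prefetching and the cache has drained to the low watermark without
    //   reaching end of stream, pause the sink and re-check the cache in 100ms instead of decoding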
    bool eos;
    if (mDataSource == 0) {
        // application set play state to paused which failed, then set play state to playing
        return;
    }

    if (wantPrefetch()
            && (getCacheRemaining(&eos) == kStatusLow)
            && !eos) {
        SL_LOGV("buffering more.");

        if (mStateFlags & kFlagPlaying) {
            pauseAudioSink();
        }
        mStateFlags |= kFlagBuffering;
        (new AMessage(kWhatCheckCache, id()))->post(100000);
        return;
    }

    if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) {
        // don't decode if we're not buffering, prefetching or playing
        //SL_LOGV("don't decode: not buffering, prefetching or playing");
        return;
    }

    //-------------------------------- Decode
    status_t err;
    MediaSource::ReadOptions readOptions;
    if (mStateFlags & kFlagSeeking) {
        assert(mSeekTimeMsec != ANDROID_UNKNOWN_TIME);
        readOptions.setSeekTo(mSeekTimeMsec * 1000);
    }

    int64_t timeUsec = ANDROID_UNKNOWN_TIME;
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            // the current decoded buffer hasn't been rendered, drop it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }
        if (!mAudioSourceStarted) {
            return;
        }
        err = mAudioSource->read(&mDecodeBuffer, &readOptions);
        if (err == OK) {
            CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &timeUsec));
        }
    }

    {
        Mutex::Autolock _l(mTimeLock);
        if (mStateFlags & kFlagSeeking) {
            mStateFlags &= ~kFlagSeeking;
            mSeekTimeMsec = ANDROID_UNKNOWN_TIME;
        }
        if (timeUsec != ANDROID_UNKNOWN_TIME) {
            mLastDecodedPositionUs = timeUsec;
        }
    }

    //-------------------------------- Handle return of decode
    if (err != OK) {
        bool continueDecoding = false;
        switch (err) {
            case ERROR_END_OF_STREAM:
                if (0 < mDurationUsec) {
                    Mutex::Autolock _l(mTimeLock);
                    mLastDecodedPositionUs = mDurationUsec;
                }
                // handle notification and looping at end of stream
                if (mStateFlags & kFlagPlaying) {
                    notify(PLAYEREVENT_ENDOFSTREAM, 1, true);
                }
                if (mStateFlags & kFlagLooping) {
                    seek(0);
                    // kick-off decoding again
                    continueDecoding = true;
                }
                break;
            case INFO_FORMAT_CHANGED:
                SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED");
                // reconfigure output
                {
                    Mutex::Autolock _l(mBufferSourceLock);
                    hasNewDecodeParams();
                }
                continueDecoding = true;
                break;
            case INFO_DISCONTINUITY:
                SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY");
                continueDecoding = true;
                break;
            default:
                SL_LOGE("MediaSource::read returned error %d", err);
                break;
        }
        if (continueDecoding) {
            if (NULL == mDecodeBuffer) {
                (new AMessage(kWhatDecode, id()))->post();
                return;
            }
        } else {
            return;
        }
    }

    //-------------------------------- Render
    sp<AMessage> msg = new AMessage(kWhatRender, id());
    msg->post();
}


void AudioSfDecoder::onRender() {
    //SL_LOGV("AudioSfDecoder::onRender");

    Mutex::Autolock _l(mBufferSourceLock);

    if (NULL == mDecodeBuffer) {
        // nothing to render, move along
        SL_LOGV("AudioSfDecoder::onRender NULL buffer, exiting");
        return;
    }

    mDecodeBuffer->release();
    mDecodeBuffer = NULL;
}


void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatPrepare:
            onPrepare();
            break;

        case kWhatDecode:
            onDecode();
            break;

        case kWhatRender:
            onRender();
            break;

        case kWhatCheckCache:
            onCheckCache(msg);
            break;

        case kWhatNotif:
            onNotify(msg);
            break;

        case kWhatPlay:
            onPlay();
            break;

        case kWhatPause:
            onPause();
            break;

/*
        case kWhatSeek:
            onSeek(msg);
            break;

        case kWhatLoop:
            onLoop(msg);
            break;
*/
        default:
            GenericPlayer::onMessageReceived(msg);
            break;
    }
}

//--------------------------------------------------
// Prepared state, prefetch status notifications
void AudioSfDecoder::notifyPrepared(status_t prepareRes) {
    assert(!(mStateFlags & (kFlagPrepared | kFlagPreparedUnsuccessfully)));
    if (NO_ERROR == prepareRes) {
        // The "then" fork is not currently used, but is kept here to make it easier
        // to replace by a new signalPrepareCompletion(status) if we re-visit this later.
        mStateFlags |= kFlagPrepared;
    } else {
        mStateFlags |= kFlagPreparedUnsuccessfully;
    }
    // Do not call the superclass onPrepare to notify, because it uses a default error
    // status code but we can provide a more specific one.
    // GenericPlayer::onPrepare();
    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true);
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


void AudioSfDecoder::onNotify(const sp<AMessage> &msg) {
    notif_cbf_t notifyClient;
    void* notifyUser;
    {
        android::Mutex::Autolock autoLock(mNotifyClientLock);
        if (NULL == mNotifyClient) {
            return;
        } else {
            notifyClient = mNotifyClient;
            notifyUser = mNotifyUser;
        }
    }
    int32_t val;
    if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val);
        notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val);
        notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val);
        notifyClient(kEventEndOfStream, val, 0, notifyUser);
    }
    else {
        GenericPlayer::onNotify(msg);
    }
}


//--------------------------------------------------
// Private utility functions

bool AudioSfDecoder::wantPrefetch() {
    if (mDataSource != 0) {
        return (mDataSource->flags() & DataSource::kWantsPrefetching);
    } else {
        // happens if an improper data locator was passed, if the media extractor couldn't be
        //   initialized, if there is no audio track in the media, if the OMX decoder couldn't be
        //   instantiated, if the source couldn't be opened, or if the MediaSource
        //   couldn't be started
        SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource");
        return false;
    }
}


int64_t AudioSfDecoder::getPositionUsec() {
    Mutex::Autolock _l(mTimeLock);
    if (mStateFlags & kFlagSeeking) {
        return mSeekTimeMsec * 1000;
    } else {
        if (mLastDecodedPositionUs < 0) {
            return ANDROID_UNKNOWN_TIME;
        } else {
            return mLastDecodedPositionUs;
        }
    }
}


CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) {
    sp<NuCachedSource2> cachedSource =
            static_cast<NuCachedSource2 *>(mDataSource.get());

    CacheStatus_t oldStatus = mCacheStatus;

    status_t finalStatus;
    size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus);
    *eos = (finalStatus != OK);

    CHECK_GE(mBitrate, 0);

    int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate;
    //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)",
    //        dataRemainingUs / 1E6, *eos);

    if (*eos) {
        // data is buffered up to the end of the stream, it can't get any better than this
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;

    } else {
        if (mDurationUsec > 0) {
            // known duration:

            //   fill level is ratio of how much has been played + how much is
            //   cached, divided by total duration
            int64_t currentPositionUsec = getPositionUsec();
            if (currentPositionUsec == ANDROID_UNKNOWN_TIME) {
                // if we don't know where we are, assume the worst for the fill ratio
                currentPositionUsec = 0;
            }
            if (mDurationUsec > 0) {
                mCacheFill = (int16_t) ((1000.0
                        * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec));
            } else {
                mCacheFill = 0;
            }
            //SL_LOGV("cacheFill = %d", mCacheFill);

            //   cache status is evaluated against duration thresholds
            if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) {
                mCacheStatus = kStatusHigh;
                //LOGV("high");
            } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) {
                //LOGV("enough");
                mCacheStatus = kStatusEnough;
            } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) {
                //LOGV("low");
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }

        } else {
            // unknown duration:

            //   cache status is evaluated against cache amount thresholds
            //   (no duration so we don't have the bitrate either, could be derived from format?)
            if (dataRemaining > SIZE_CACHED_HIGH_BYTES) {
                mCacheStatus = kStatusHigh;
            } else if (dataRemaining > SIZE_CACHED_MED_BYTES) {
                mCacheStatus = kStatusEnough;
            } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) {
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }
        }
    }

    if (oldStatus != mCacheStatus) {
        notifyStatus();
    }

    if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) {
        notifyCacheFill();
    }

    return mCacheStatus;
}


void AudioSfDecoder::hasNewDecodeParams() {

    if ((mAudioSource != 0) && mAudioSourceStarted) {
        sp<MetaData> meta = mAudioSource->getFormat();

        SL_LOGV("old sample rate = %d, channel count = %d", mSampleRateHz, mChannelCount);

        CHECK(meta->findInt32(kKeyChannelCount, &mChannelCount));
        int32_t sr;
        CHECK(meta->findInt32(kKeySampleRate, &sr));
        mSampleRateHz = (uint32_t) sr;
        SL_LOGV("format changed: new sample rate = %d, channel count = %d",
                mSampleRateHz, mChannelCount);

        // FIXME similar to onPrepare()
        mChannelMask = channelCountToMask(mChannelCount);

        {
            android::Mutex::Autolock autoLock(mPcmFormatLock);
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = mChannelCount;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = mSampleRateHz;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = mChannelMask;
        }
    }

    // alert users of those params
    updateAudioSink();
}

}  // namespace android