android_AudioSfDecoder.cpp revision 2b06e20ae32388f6e1dfd088d9773c34e6b1cb45
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Verbose

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"

namespace android {

//--------------------------------------------------------------------------------------------------
// Constructor: initializes all decode/cache bookkeeping to "unknown"/empty defaults.
// -1 sentinels mean "not yet known" (bitrate, duration, last decoded position, time delta).
// mCacheFill is in permille (0..1000); mCacheFillNotifThreshold is the minimum permille
// change before a fill-level notification is sent (see notifyCacheFill / getCacheRemaining).
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mBitrate(-1),
        mNumChannels(1),
        mSampleRateHz(0),
        mDurationUsec(-1),
        mDecodeBuffer(NULL),
        mCacheStatus(kStatusEmpty),
        mCacheFill(0),
        mLastNotifiedCacheFill(0),
        mCacheFillNotifThreshold(100),
        mTimeDelta(-1),
        mSeekTimeMsec(0),
        mLastDecodedPositionUs(-1)
{
    SL_LOGV("AudioSfDecoder::AudioSfDecoder()");

}


// Destructor: base class is responsible for teardown; nothing owned here beyond
// what the members release themselves. NOTE(review): mDecodeBuffer release on
// destruction is presumably handled elsewhere — confirm against GenericPlayer.
AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGV("AudioSfDecoder::~AudioSfDecoder()");

}


//--------------------------------------------------
// Transitions to the playing state and kicks off the asynchronous decode loop
// by posting a kWhatDecode message to this player's looper.
void AudioSfDecoder::play() {
    SL_LOGV("AudioSfDecoder::play");

    GenericPlayer::play();
    (new AMessage(kWhatDecode, id()))->post();
}


// Starts prefetching asynchronously if the data source wants it (see wantPrefetch()).
// Sets both kFlagPreparing and kFlagBuffering, then posts a kWhatCheckCache message;
// onCheckCache() re-posts itself until the cache is sufficiently filled.
void AudioSfDecoder::startPrefetch_async() {
    SL_LOGV("AudioSfDecoder::startPrefetch_async()");

    if (wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::startPrefetch_async(): sending check cache msg");

        mStateFlags |= kFlagPreparing | kFlagBuffering;

        (new AMessage(kWhatCheckCache, id()))->post();
    }
}
//--------------------------------------------------
// Event handlers

// Opens the configured data locator (URI or file descriptor), instantiates a
// MediaExtractor, selects the first "audio/" track, wraps it in an OMX decoder
// unless the track is already raw PCM, starts the source, records channel count
// and sample rate, and creates the audio sink. Any failure is reported to the
// client via notifyPrepared(<error>) and aborts preparation.
void AudioSfDecoder::onPrepare() {
    SL_LOGD("AudioSfDecoder::onPrepare()");

    sp<DataSource> dataSource;

    switch (mDataLocatorType) {

    case kDataLocatorNone:
        SL_LOGE("AudioSfDecoder::onPrepare: no data locator set");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;

    case kDataLocatorUri:
        dataSource = DataSource::CreateFromURI(mDataLocator.uriRef);
        if (dataSource == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef);
            notifyPrepared(MEDIA_ERROR_BASE);
            return;
        }
        break;

    case kDataLocatorFd:
        {
        dataSource = new FileSource(
                mDataLocator.fdi.fd, mDataLocator.fdi.offset, mDataLocator.fdi.length);
        status_t err = dataSource->initCheck();
        if (err != OK) {
            notifyPrepared(err);
            return;
        }
        break;
        }

    default:
        // unknown locator type is a programming error
        TRESPASS();
    }

    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    if (extractor == NULL) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    // Pick the first track whose MIME type starts with "audio/"; remember whether
    // it is raw PCM (MEDIA_MIMETYPE_AUDIO_RAW) so we can skip the decoder below.
    ssize_t audioTrackIndex = -1;
    bool isRawAudio = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("audio/", mime, 6)) {
            audioTrackIndex = i;

            if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
                isRawAudio = true;
            }
            break;
        }
    }

    if (audioTrackIndex < 0) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MediaSource> source = extractor->getTrack(audioTrackIndex);
    sp<MetaData> meta = source->getFormat();

    // Derive an average bitrate (bits/sec) from container size and duration; both
    // fall back to -1 when either is unknown. getCacheRemaining() relies on
    // mBitrate >= 0 when prefetching from a cached source.
    off64_t size;
    int64_t durationUs;
    if (dataSource->getSize(&size) == OK
            && meta->findInt64(kKeyDuration, &durationUs)) {
        mBitrate = size * 8000000ll / durationUs;  // in bits/sec
        mDurationUsec = durationUs;
    } else {
        mBitrate = -1;
        mDurationUsec = -1;
    }

    if (!isRawAudio) {
        OMXClient client;
        CHECK_EQ(client.connect(), (status_t)OK);

        // Wrap the extractor track in an OMX software/hardware decoder; the decoded
        // output format (channels, sample rate) is re-read from the codec below.
        source = OMXCodec::Create(
                client.interface(), meta, false /* createEncoder */,
                source);

        if (source == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder.");
            notifyPrepared(ERROR_UNSUPPORTED);
            return;
        }

        meta = source->getFormat();
    }


    if (source->start() != OK) {
        SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    mDataSource = dataSource;
    mAudioSource = source;

    CHECK(meta->findInt32(kKeyChannelCount, &mNumChannels));
    CHECK(meta->findInt32(kKeySampleRate, &mSampleRateHz));

    if (!wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch");
        // doesn't need prefetching, notify good to go
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;
        notifyStatus();
        notifyCacheFill();
    }

    // at this point we have enough information about the source to create the sink that
    // will consume the data
    createAudioSink();

    GenericPlayer::onPrepare();
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


// Pauses playback: base-class state change first, then pause the sink.
void AudioSfDecoder::onPause() {
    SL_LOGD("AudioSfDecoder::onPause()");
    GenericPlayer::onPause();
    pauseAudioSink();
}


// Resumes playback: base-class state change first, then (re)start the sink.
void AudioSfDecoder::onPlay() {
    SL_LOGD("AudioSfDecoder::onPlay()");
    GenericPlayer::onPlay();
    startAudioSink();
}


// Records a pending seek request. The actual seek is performed by the next
// onDecode() pass via ReadOptions::setSeekTo(). mSeekLock guards the seek state
// shared with getPositionUsec() and the flag-clearing in onDecode().
void AudioSfDecoder::onSeek(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onSeek");
    int64_t timeMsec;
    CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec));

    Mutex::Autolock _l(mSeekLock);
    mStateFlags |= kFlagSeeking;
    mSeekTimeMsec = timeMsec;
    // invalidate cached timing so it is recomputed after the seek completes
    mTimeDelta = -1;
    mLastDecodedPositionUs = -1;
}


// Sets or clears the looping flag; consulted by onDecode() at end of stream.
void AudioSfDecoder::onLoop(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onLoop");
    int32_t loop;
    CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop));

    if (loop) {
        //SL_LOGV("AudioSfDecoder::onLoop start looping");
        mStateFlags |= kFlagLooping;
    } else {
        //SL_LOGV("AudioSfDecoder::onLoop stop looping");
        mStateFlags &= ~kFlagLooping;
    }
}


// Polls the cache level. When buffering is done (EOS, cache high, or — while
// preparing — at least "enough"), clears the buffering/preparing flags, restarts
// the sink and decode loop if playing, and stops re-posting. Otherwise re-posts
// itself in 100 ms (post() takes microseconds).
void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) {
    //SL_LOGV("AudioSfDecoder::onCheckCache");
    bool eos;
    CacheStatus status = getCacheRemaining(&eos);

    if (eos || status == kStatusHigh
            || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) {
        if (mStateFlags & kFlagPlaying) {
            startAudioSink();
        }
        mStateFlags &= ~kFlagBuffering;

        SL_LOGV("AudioSfDecoder::onCheckCache: buffering done.");

        if (mStateFlags & kFlagPreparing) {
            //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done.");
            mStateFlags &= ~kFlagPreparing;
        }

        mTimeDelta = -1;
        if (mStateFlags & kFlagPlaying) {
            (new AMessage(kWhatDecode, id()))->post();
        }
        return;
    }

    msg->post(100000);  // recheck in 100 ms
}


// One pass of the decode loop: optionally waits for more cache, reads one buffer
// from the decoder (applying any pending seek), handles decoder status codes
// (EOS / format change / discontinuity), and posts kWhatRender on success.
void AudioSfDecoder::onDecode() {
    SL_LOGV("AudioSfDecoder::onDecode");

    //-------------------------------- Need to buffer some more before decoding?
    bool eos;
    if (mDataSource == 0) {
        // application set play state to paused which failed, then set play state to playing
        return;
    }
    if (wantPrefetch()
            && (getCacheRemaining(&eos) == kStatusLow)
            && !eos) {
        SL_LOGV("buffering more.");

        if (mStateFlags & kFlagPlaying) {
            pauseAudioSink();
        }
        mStateFlags |= kFlagBuffering;
        (new AMessage(kWhatCheckCache, id()))->post(100000);
        return;
    }

    if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) {
        // don't decode if we're not buffering, prefetching or playing
        //SL_LOGV("don't decode: not buffering, prefetching or playing");
        return;
    }

    //-------------------------------- Decode
    status_t err;
    MediaSource::ReadOptions readOptions;
    if (mStateFlags & kFlagSeeking) {
        readOptions.setSeekTo(mSeekTimeMsec * 1000);  // ms -> us
    }

    {
        // mDecodeBufferLock serializes against onRender(), which releases the buffer
        Mutex::Autolock _l(mDecodeBufferLock);
        if (NULL != mDecodeBuffer) {
            // the current decoded buffer hasn't been rendered, drop it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }
        err = mAudioSource->read(&mDecodeBuffer, &readOptions);
        if (err == OK) {
            CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &mLastDecodedPositionUs));
        }
    }

    {
        // clear the seek flag under mSeekLock so getPositionUsec() sees a
        // consistent seeking/position state
        Mutex::Autolock _l(mSeekLock);
        if (mStateFlags & kFlagSeeking) {
            mStateFlags &= ~kFlagSeeking;
        }
    }

    //-------------------------------- Handle return of decode
    if (err != OK) {
        bool continueDecoding = false;
        switch(err) {
        case ERROR_END_OF_STREAM:
            // pin reported position to the full duration when known
            if (0 < mDurationUsec) {
                mLastDecodedPositionUs = mDurationUsec;
            }
            // handle notification and looping at end of stream
            if (mStateFlags & kFlagPlaying) {
                notify(PLAYEREVENT_ENDOFSTREAM, 1, true);
            }
            if (mStateFlags & kFlagLooping) {
                seek(0);
                // kick-off decoding again
                continueDecoding = true;
            }
            break;
        case INFO_FORMAT_CHANGED:
            SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED");
            // reconfigure output
            updateAudioSink();
            continueDecoding = true;
            break;
        case INFO_DISCONTINUITY:
            SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY");
            continueDecoding = true;
            break;
        default:
            SL_LOGE("MediaSource::read returned error %d", err);
            break;
        }
        if (continueDecoding) {
            if (NULL == mDecodeBuffer) {
                // no buffer to render on this pass; schedule another decode
                (new AMessage(kWhatDecode, id()))->post();
                return;
            }
        } else {
            return;
        }
    }

    //-------------------------------- Render
    sp<AMessage> msg = new AMessage(kWhatRender, id());
    msg->post();
}


// Consumes (here: simply releases) the buffer produced by onDecode().
// NOTE(review): no data is actually written to the sink here — presumably a
// subclass or later revision performs the real rendering; confirm.
void AudioSfDecoder::onRender() {
    //SL_LOGV("AudioSfDecoder::onRender");

    Mutex::Autolock _l(mDecodeBufferLock);

    if (NULL == mDecodeBuffer) {
        // nothing to render, move along
        SL_LOGV("AudioSfDecoder::onRender NULL buffer, exiting");
        return;
    }

    mDecodeBuffer->release();
    mDecodeBuffer = NULL;

}


// Looper dispatch: routes messages to the handlers above; kWhatSeek/kWhatLoop
// are commented out and therefore fall through to GenericPlayer's handler.
void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatPrepare:
            onPrepare();
            break;

        case kWhatDecode:
            onDecode();
            break;

        case kWhatRender:
            onRender();
            break;

        case kWhatCheckCache:
            onCheckCache(msg);
            break;

        case kWhatNotif:
            onNotify(msg);
            break;

        case kWhatPlay:
            onPlay();
            break;

        case kWhatPause:
            onPause();
            break;
/*
        case kWhatSeek:
            onSeek(msg);
            break;

        case kWhatLoop:
            onLoop(msg);
            break;
*/
        default:
            GenericPlayer::onMessageReceived(msg);
            break;
    }
}

//--------------------------------------------------
// Prepared state, prefetch status notifications

// Reports the outcome of preparation to the client (OK or an error status).
void AudioSfDecoder::notifyPrepared(status_t prepareRes) {
    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true);

}

// Sends the current cache status (kStatus*) to the client.
void AudioSfDecoder::notifyStatus() {
    notify(PLAYEREVENT_PREFETCHSTATUSCHANGE, (int32_t)mCacheStatus, true);
}

// Sends the current fill level (permille) and remembers it so getCacheRemaining()
// can throttle further notifications by mCacheFillNotifThreshold.
void AudioSfDecoder::notifyCacheFill() {
    mLastNotifiedCacheFill = mCacheFill;
    notify(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, (int32_t)mLastNotifiedCacheFill, true);
}

// Translates queued notification messages into client callbacks; unrecognized
// keys are delegated to GenericPlayer.
void AudioSfDecoder::onNotify(const sp<AMessage> &msg) {
    if (NULL == mNotifyClient) {
        return;
    }
    int32_t val;
    if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val);
        mNotifyClient(kEventPrefetchStatusChange, val, 0, mNotifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val);
        mNotifyClient(kEventPrefetchFillLevelUpdate, val, 0, mNotifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val);
        mNotifyClient(kEventEndOfStream, val, 0, mNotifyUser);
    }
    else {
        GenericPlayer::onNotify(msg);
    }
}


//--------------------------------------------------
// Private utility functions

// True when the data source advertises kWantsPrefetching (e.g. network/cached
// sources); gates all of the cache-watching logic above.
bool AudioSfDecoder::wantPrefetch() {
    return (mDataSource->flags() & DataSource::kWantsPrefetching);
}


// Returns the current playback position in microseconds: the pending seek
// target while a seek is in flight, otherwise the timestamp of the last decoded
// buffer (0 if nothing has been decoded yet). Guarded by mSeekLock.
int64_t AudioSfDecoder::getPositionUsec() {
    Mutex::Autolock _l(mSeekLock);
    if (mStateFlags & kFlagSeeking) {
        return mSeekTimeMsec * 1000;
    } else {
        if (mLastDecodedPositionUs < 0) {
            return 0;
        } else {
            return mLastDecodedPositionUs;
        }
    }
}


// Evaluates how much data remains buffered and maps it to a CacheStatus,
// updating mCacheStatus/mCacheFill and sending notifications when the status
// changes or the fill level moves by more than mCacheFillNotifThreshold.
// Sets *eos when the source has buffered through end of stream.
// NOTE(review): the static_cast assumes mDataSource is a NuCachedSource2 —
// presumably guaranteed because callers check wantPrefetch() first; confirm.
AudioSfDecoder::CacheStatus AudioSfDecoder::getCacheRemaining(bool *eos) {
    sp<NuCachedSource2> cachedSource =
            static_cast<NuCachedSource2 *>(mDataSource.get());

    CacheStatus oldStatus = mCacheStatus;

    status_t finalStatus;
    size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus);
    *eos = (finalStatus != OK);

    // NOTE(review): CHECK_GE permits mBitrate == 0, which would divide by zero
    // below — confirm a zero bitrate cannot reach this path.
    CHECK_GE(mBitrate, 0);

    int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate;
    //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)",
    //       dataRemainingUs / 1E6, *eos);

    if (*eos) {
        // data is buffered up to the end of the stream, it can't get any better than this
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;

    } else {
        if (mDurationUsec > 0) {
            // known duration:

            // fill level is ratio of how much has been played + how much is
            // cached, divided by total duration
            // NOTE(review): getPositionUsec() returns int64_t; storing it in a
            // uint32_t truncates positions beyond ~71 minutes — confirm intended.
            uint32_t currentPositionUsec = getPositionUsec();
            mCacheFill = (int16_t) ((1000.0
                    * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec));
            //SL_LOGV("cacheFill = %d", mCacheFill);

            // cache status is evaluated against duration thresholds
            if (dataRemainingUs > DURATION_CACHED_HIGH_US) {
                mCacheStatus = kStatusHigh;
                //LOGV("high");
            } else if (dataRemainingUs > DURATION_CACHED_MED_US) {
                //LOGV("enough");
                mCacheStatus = kStatusEnough;
            } else if (dataRemainingUs < DURATION_CACHED_LOW_US) {
                //LOGV("low");
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }

        } else {
            // unknown duration:

            // cache status is evaluated against cache amount thresholds
            // (no duration so we don't have the bitrate either, could be derived from format?)
            if (dataRemaining > SIZE_CACHED_HIGH_BYTES) {
                mCacheStatus = kStatusHigh;
            } else if (dataRemaining > SIZE_CACHED_MED_BYTES) {
                mCacheStatus = kStatusEnough;
            } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) {
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }
        }

    }

    if (oldStatus != mCacheStatus) {
        notifyStatus();
    }

    if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) {
        notifyCacheFill();
    }

    return mCacheStatus;
}

} // namespace android