AwesomePlayer.cpp revision 889b340ec736a9d3e3e690256d305cc8740f0c4b
1/* 2 * Copyright (C) 2009 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "AwesomePlayer" 19#include <utils/Log.h> 20 21#include <dlfcn.h> 22 23#include "include/ARTSPController.h" 24#include "include/AwesomePlayer.h" 25#include "include/SoftwareRenderer.h" 26#include "include/NuCachedSource2.h" 27#include "include/ThrottledSource.h" 28#include "include/MPEG2TSExtractor.h" 29 30#include "ARTPSession.h" 31#include "APacketSource.h" 32#include "ASessionDescription.h" 33#include "UDPPusher.h" 34 35#include <binder/IPCThreadState.h> 36#include <media/stagefright/foundation/hexdump.h> 37#include <media/stagefright/foundation/ADebug.h> 38#include <media/stagefright/AudioPlayer.h> 39#include <media/stagefright/DataSource.h> 40#include <media/stagefright/FileSource.h> 41#include <media/stagefright/MediaBuffer.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/MediaExtractor.h> 44#include <media/stagefright/MediaSource.h> 45#include <media/stagefright/MetaData.h> 46#include <media/stagefright/OMXCodec.h> 47 48#include <surfaceflinger/Surface.h> 49 50#include <media/stagefright/foundation/ALooper.h> 51#include <media/stagefright/foundation/AMessage.h> 52#include "include/LiveSession.h" 53 54#define USE_SURFACE_ALLOC 1 55#define FRAME_DROP_FREQ 7 56 57namespace android { 58 59static int64_t kLowWaterMarkUs = 2000000ll; // 2secs 60static int64_t 
kHighWaterMarkUs = 10000000ll; // 10secs 61static const size_t kLowWaterMarkBytes = 40000; 62static const size_t kHighWaterMarkBytes = 200000; 63 64struct AwesomeEvent : public TimedEventQueue::Event { 65 AwesomeEvent( 66 AwesomePlayer *player, 67 void (AwesomePlayer::*method)()) 68 : mPlayer(player), 69 mMethod(method) { 70 } 71 72protected: 73 virtual ~AwesomeEvent() {} 74 75 virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) { 76 (mPlayer->*mMethod)(); 77 } 78 79private: 80 AwesomePlayer *mPlayer; 81 void (AwesomePlayer::*mMethod)(); 82 83 AwesomeEvent(const AwesomeEvent &); 84 AwesomeEvent &operator=(const AwesomeEvent &); 85}; 86 87struct AwesomeLocalRenderer : public AwesomeRenderer { 88 AwesomeLocalRenderer( 89 const sp<Surface> &surface, const sp<MetaData> &meta) 90 : mTarget(new SoftwareRenderer(surface, meta)) { 91 } 92 93 virtual void render(MediaBuffer *buffer) { 94 render((const uint8_t *)buffer->data() + buffer->range_offset(), 95 buffer->range_length()); 96 } 97 98 void render(const void *data, size_t size) { 99 mTarget->render(data, size, NULL); 100 } 101 102protected: 103 virtual ~AwesomeLocalRenderer() { 104 delete mTarget; 105 mTarget = NULL; 106 } 107 108private: 109 SoftwareRenderer *mTarget; 110 111 AwesomeLocalRenderer(const AwesomeLocalRenderer &); 112 AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);; 113}; 114 115struct AwesomeNativeWindowRenderer : public AwesomeRenderer { 116 AwesomeNativeWindowRenderer( 117 const sp<ANativeWindow> &nativeWindow, 118 int32_t rotationDegrees) 119 : mNativeWindow(nativeWindow) { 120 applyRotation(rotationDegrees); 121 } 122 123 virtual void render(MediaBuffer *buffer) { 124 status_t err = mNativeWindow->queueBuffer( 125 mNativeWindow.get(), buffer->graphicBuffer().get()); 126 if (err != 0) { 127 LOGE("queueBuffer failed with error %s (%d)", strerror(-err), 128 -err); 129 return; 130 } 131 132 sp<MetaData> metaData = buffer->meta_data(); 133 metaData->setInt32(kKeyRendered, 1); 134 
} 135 136protected: 137 virtual ~AwesomeNativeWindowRenderer() {} 138 139private: 140 sp<ANativeWindow> mNativeWindow; 141 142 void applyRotation(int32_t rotationDegrees) { 143 uint32_t transform; 144 switch (rotationDegrees) { 145 case 0: transform = 0; break; 146 case 90: transform = HAL_TRANSFORM_ROT_90; break; 147 case 180: transform = HAL_TRANSFORM_ROT_180; break; 148 case 270: transform = HAL_TRANSFORM_ROT_270; break; 149 default: transform = 0; break; 150 } 151 152 if (transform) { 153 CHECK_EQ(0, native_window_set_buffers_transform( 154 mNativeWindow.get(), transform)); 155 } 156 } 157 158 AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &); 159 AwesomeNativeWindowRenderer &operator=( 160 const AwesomeNativeWindowRenderer &); 161}; 162 163//////////////////////////////////////////////////////////////////////////////// 164 165AwesomePlayer::AwesomePlayer() 166 : mQueueStarted(false), 167 mTimeSource(NULL), 168 mVideoRendererIsPreview(false), 169 mAudioPlayer(NULL), 170 mDisplayWidth(0), 171 mDisplayHeight(0), 172 mFlags(0), 173 mExtractorFlags(0), 174 mVideoBuffer(NULL), 175 mDecryptHandle(NULL) { 176 CHECK_EQ(mClient.connect(), (status_t)OK); 177 178 DataSource::RegisterDefaultSniffers(); 179 180 mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent); 181 mVideoEventPending = false; 182 mStreamDoneEvent = new AwesomeEvent(this, &AwesomePlayer::onStreamDone); 183 mStreamDoneEventPending = false; 184 mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate); 185 mBufferingEventPending = false; 186 mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate); 187 mVideoEventPending = false; 188 189 mCheckAudioStatusEvent = new AwesomeEvent( 190 this, &AwesomePlayer::onCheckAudioStatus); 191 192 mAudioStatusEventPending = false; 193 194 reset(); 195} 196 197AwesomePlayer::~AwesomePlayer() { 198 if (mQueueStarted) { 199 mQueue.stop(); 200 } 201 202 reset(); 203 204 mClient.disconnect(); 205} 206 207void 
AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) { 208 mQueue.cancelEvent(mVideoEvent->eventID()); 209 mVideoEventPending = false; 210 mQueue.cancelEvent(mStreamDoneEvent->eventID()); 211 mStreamDoneEventPending = false; 212 mQueue.cancelEvent(mCheckAudioStatusEvent->eventID()); 213 mAudioStatusEventPending = false; 214 mQueue.cancelEvent(mVideoLagEvent->eventID()); 215 mVideoLagEventPending = false; 216 217 if (!keepBufferingGoing) { 218 mQueue.cancelEvent(mBufferingEvent->eventID()); 219 mBufferingEventPending = false; 220 } 221} 222 223void AwesomePlayer::setListener(const wp<MediaPlayerBase> &listener) { 224 Mutex::Autolock autoLock(mLock); 225 mListener = listener; 226} 227 228status_t AwesomePlayer::setDataSource( 229 const char *uri, const KeyedVector<String8, String8> *headers) { 230 Mutex::Autolock autoLock(mLock); 231 return setDataSource_l(uri, headers); 232} 233 234status_t AwesomePlayer::setDataSource_l( 235 const char *uri, const KeyedVector<String8, String8> *headers) { 236 reset_l(); 237 238 mUri = uri; 239 240 if (!strncmp("http://", uri, 7)) { 241 // Hack to support http live. 242 243 size_t len = strlen(uri); 244 if (!strcasecmp(&uri[len - 5], ".m3u8") 245 || strstr(&uri[7], "m3u8") != NULL) { 246 mUri = "httplive://"; 247 mUri.append(&uri[7]); 248 } 249 } 250 251 if (headers) { 252 mUriHeaders = *headers; 253 } 254 255 // The actual work will be done during preparation in the call to 256 // ::finishSetDataSource_l to avoid blocking the calling thread in 257 // setDataSource for any significant time. 
258 259 return OK; 260} 261 262status_t AwesomePlayer::setDataSource( 263 int fd, int64_t offset, int64_t length) { 264 Mutex::Autolock autoLock(mLock); 265 266 reset_l(); 267 268 sp<DataSource> dataSource = new FileSource(fd, offset, length); 269 270 status_t err = dataSource->initCheck(); 271 272 if (err != OK) { 273 return err; 274 } 275 276 mFileSource = dataSource; 277 278 return setDataSource_l(dataSource); 279} 280 281status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source) { 282 return INVALID_OPERATION; 283} 284 285status_t AwesomePlayer::setDataSource_l( 286 const sp<DataSource> &dataSource) { 287 sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource); 288 289 if (extractor == NULL) { 290 return UNKNOWN_ERROR; 291 } 292 293 dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient); 294 if (mDecryptHandle != NULL 295 && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) { 296 notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE); 297 } 298 299 return setDataSource_l(extractor); 300} 301 302status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) { 303 // Attempt to approximate overall stream bitrate by summing all 304 // tracks' individual bitrates, if not all of them advertise bitrate, 305 // we have to fail. 
306 307 int64_t totalBitRate = 0; 308 309 for (size_t i = 0; i < extractor->countTracks(); ++i) { 310 sp<MetaData> meta = extractor->getTrackMetaData(i); 311 312 int32_t bitrate; 313 if (!meta->findInt32(kKeyBitRate, &bitrate)) { 314 totalBitRate = -1; 315 break; 316 } 317 318 totalBitRate += bitrate; 319 } 320 321 mBitrate = totalBitRate; 322 323 LOGV("mBitrate = %lld bits/sec", mBitrate); 324 325 bool haveAudio = false; 326 bool haveVideo = false; 327 for (size_t i = 0; i < extractor->countTracks(); ++i) { 328 sp<MetaData> meta = extractor->getTrackMetaData(i); 329 330 const char *mime; 331 CHECK(meta->findCString(kKeyMIMEType, &mime)); 332 333 if (!haveVideo && !strncasecmp(mime, "video/", 6)) { 334 setVideoSource(extractor->getTrack(i)); 335 haveVideo = true; 336 337 // Set the presentation/display size 338 int32_t displayWidth, displayHeight; 339 bool success = meta->findInt32(kKeyDisplayWidth, &displayWidth); 340 if (success) { 341 success = meta->findInt32(kKeyDisplayHeight, &displayHeight); 342 } 343 if (success) { 344 mDisplayWidth = displayWidth; 345 mDisplayHeight = displayHeight; 346 } 347 348 } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) { 349 setAudioSource(extractor->getTrack(i)); 350 haveAudio = true; 351 352 if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) { 353 // Only do this for vorbis audio, none of the other audio 354 // formats even support this ringtone specific hack and 355 // retrieving the metadata on some extractors may turn out 356 // to be very expensive. 
357 sp<MetaData> fileMeta = extractor->getMetaData(); 358 int32_t loop; 359 if (fileMeta != NULL 360 && fileMeta->findInt32(kKeyAutoLoop, &loop) && loop != 0) { 361 mFlags |= AUTO_LOOPING; 362 } 363 } 364 } 365 366 if (haveAudio && haveVideo) { 367 break; 368 } 369 } 370 371 if (!haveAudio && !haveVideo) { 372 return UNKNOWN_ERROR; 373 } 374 375 mExtractorFlags = extractor->flags(); 376 377 return OK; 378} 379 380void AwesomePlayer::reset() { 381 LOGI("reset"); 382 383 Mutex::Autolock autoLock(mLock); 384 reset_l(); 385} 386 387void AwesomePlayer::reset_l() { 388 LOGI("reset_l"); 389 mDisplayWidth = 0; 390 mDisplayHeight = 0; 391 392 if (mDecryptHandle != NULL) { 393 mDrmManagerClient->setPlaybackStatus(mDecryptHandle, 394 Playback::STOP, 0); 395 mDecryptHandle = NULL; 396 mDrmManagerClient = NULL; 397 } 398 399 if (mFlags & PREPARING) { 400 mFlags |= PREPARE_CANCELLED; 401 if (mConnectingDataSource != NULL) { 402 LOGI("interrupting the connection process"); 403 mConnectingDataSource->disconnect(); 404 } 405 406 if (mFlags & PREPARING_CONNECTED) { 407 // We are basically done preparing, we're just buffering 408 // enough data to start playback, we can safely interrupt that. 409 finishAsyncPrepare_l(); 410 } 411 } 412 413 if (mFlags & PREPARING) { 414 LOGI("waiting until preparation is completes."); 415 } 416 417 while (mFlags & PREPARING) { 418 mPreparedCondition.wait(mLock); 419 } 420 421 cancelPlayerEvents(); 422 423 mCachedSource.clear(); 424 mAudioTrack.clear(); 425 mVideoTrack.clear(); 426 427 // Shutdown audio first, so that the respone to the reset request 428 // appears to happen instantaneously as far as the user is concerned 429 // If we did this later, audio would continue playing while we 430 // shutdown the video-related resources and the player appear to 431 // not be as responsive to a reset request. 
432 if (mAudioPlayer == NULL && mAudioSource != NULL) { 433 // If we had an audio player, it would have effectively 434 // taken possession of the audio source and stopped it when 435 // _it_ is stopped. Otherwise this is still our responsibility. 436 mAudioSource->stop(); 437 } 438 mAudioSource.clear(); 439 440 LOGI("audio source cleared"); 441 442 mTimeSource = NULL; 443 444 delete mAudioPlayer; 445 mAudioPlayer = NULL; 446 447 mVideoRenderer.clear(); 448 449 if (mVideoBuffer) { 450 mVideoBuffer->release(); 451 mVideoBuffer = NULL; 452 } 453 454 if (mRTSPController != NULL) { 455 mRTSPController->disconnect(); 456 mRTSPController.clear(); 457 } 458 459 if (mLiveSession != NULL) { 460 mLiveSession->disconnect(); 461 mLiveSession.clear(); 462 } 463 464 mRTPPusher.clear(); 465 mRTCPPusher.clear(); 466 mRTPSession.clear(); 467 468 if (mVideoSource != NULL) { 469 mVideoSource->stop(); 470 471 // The following hack is necessary to ensure that the OMX 472 // component is completely released by the time we may try 473 // to instantiate it again. 
474 wp<MediaSource> tmp = mVideoSource; 475 mVideoSource.clear(); 476 while (tmp.promote() != NULL) { 477 usleep(1000); 478 } 479 IPCThreadState::self()->flushCommands(); 480 } 481 482 LOGI("video source cleared"); 483 484 mDurationUs = -1; 485 mFlags = 0; 486 mExtractorFlags = 0; 487 mTimeSourceDeltaUs = 0; 488 mVideoTimeUs = 0; 489 490 mSeeking = false; 491 mSeekNotificationSent = false; 492 mSeekTimeUs = 0; 493 494 mUri.setTo(""); 495 mUriHeaders.clear(); 496 497 mFileSource.clear(); 498 499 mBitrate = -1; 500 501 LOGI("reset_l completed"); 502} 503 504void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) { 505 if (mListener != NULL) { 506 sp<MediaPlayerBase> listener = mListener.promote(); 507 508 if (listener != NULL) { 509 listener->sendEvent(msg, ext1, ext2); 510 } 511 } 512} 513 514bool AwesomePlayer::getBitrate(int64_t *bitrate) { 515 off64_t size; 516 if (mDurationUs >= 0 && mCachedSource != NULL 517 && mCachedSource->getSize(&size) == OK) { 518 *bitrate = size * 8000000ll / mDurationUs; // in bits/sec 519 return true; 520 } 521 522 if (mBitrate >= 0) { 523 *bitrate = mBitrate; 524 return true; 525 } 526 527 *bitrate = 0; 528 529 return false; 530} 531 532// Returns true iff cached duration is available/applicable. 
// Reports the amount of buffered-but-unplayed media.  RTSP sessions report
// their queue depth directly; cached (http) sources convert the remaining
// byte count to a duration using the stream bitrate.  *eos is set when the
// source has hit its final status.
bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
    int64_t bitrate;

    if (mRTSPController != NULL) {
        *durationUs = mRTSPController->getQueueDurationUs(eos);
        return true;
    } else if (mCachedSource != NULL && getBitrate(&bitrate)) {
        status_t finalStatus;
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
        *durationUs = cachedDataRemaining * 8000000ll / bitrate;
        *eos = (finalStatus != OK);
        return true;
    }

    return false;
}

// Kicks the cached source to resume fetching if it had suspended itself.
void AwesomePlayer::ensureCacheIsFetching_l() {
    if (mCachedSource != NULL) {
        mCachedSource->resumeFetchingIfNecessary();
    }
}

// Periodic event: compares audio clock vs. last rendered video time and
// notifies the listener when video trails audio by more than 300ms.
// NOTE(review): reads mAudioPlayer unconditionally — this event is only
// posted when both audio and video sources exist (see play_l).
void AwesomePlayer::onVideoLagUpdate() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoLagEventPending) {
        return;
    }
    mVideoLagEventPending = false;

    int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs();
    int64_t videoLateByUs = audioTimeUs - mVideoTimeUs;

    if (videoLateByUs > 300000ll) {
        LOGV("video late by %lld ms.", videoLateByUs / 1000ll);

        notifyListener_l(
                MEDIA_INFO,
                MEDIA_INFO_VIDEO_TRACK_LAGGING,
                videoLateByUs / 1000ll);
    }

    postVideoLagEvent_l();
}

// Periodic event driving the buffering state machine: emits
// MEDIA_BUFFERING_UPDATE percentages, pauses playback (CACHE_UNDERRUN) when
// the cache drops below the low watermark and resumes it above the high
// watermark.  Uses duration-based watermarks when the bitrate is known,
// byte-based watermarks otherwise.
void AwesomePlayer::onBufferingUpdate() {
    Mutex::Autolock autoLock(mLock);
    if (!mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = false;

    if (mCachedSource != NULL) {
        status_t finalStatus;
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
        bool eos = (finalStatus != OK);

        if (eos) {
            if (finalStatus == ERROR_END_OF_STREAM) {
                notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
            }
            if (mFlags & PREPARING) {
                LOGV("cache has reached EOS, prepare is done.");
                finishAsyncPrepare_l();
            }
        } else {
            int64_t bitrate;
            if (getBitrate(&bitrate)) {
                size_t cachedSize = mCachedSource->cachedSize();
                int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;

                int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
                if (percentage > 100) {
                    percentage = 100;
                }

                notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
            } else {
                // We don't know the bitrate of the stream, use absolute size
                // limits to maintain the cache.

                if ((mFlags & PLAYING) && !eos
                        && (cachedDataRemaining < kLowWaterMarkBytes)) {
                    LOGI("cache is running low (< %d) , pausing.",
                         kLowWaterMarkBytes);
                    mFlags |= CACHE_UNDERRUN;
                    pause_l();
                    ensureCacheIsFetching_l();
                    notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
                } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
                    if (mFlags & CACHE_UNDERRUN) {
                        LOGI("cache has filled up (> %d), resuming.",
                             kHighWaterMarkBytes);
                        mFlags &= ~CACHE_UNDERRUN;
                        play_l();
                        notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
                    } else if (mFlags & PREPARING) {
                        LOGV("cache has filled up (> %d), prepare is done",
                             kHighWaterMarkBytes);
                        finishAsyncPrepare_l();
                    }
                }
            }
        }
    }

    // Duration-based pause/resume (also covers the RTSP path).
    int64_t cachedDurationUs;
    bool eos;
    if (getCachedDuration_l(&cachedDurationUs, &eos)) {
        LOGV("cachedDurationUs = %.2f secs, eos=%d",
             cachedDurationUs / 1E6, eos);

        if ((mFlags & PLAYING) && !eos
                && (cachedDurationUs < kLowWaterMarkUs)) {
            LOGI("cache is running low (%.2f secs) , pausing.",
                 cachedDurationUs / 1E6);
            mFlags |= CACHE_UNDERRUN;
            pause_l();
            ensureCacheIsFetching_l();
            notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
            if (mFlags & CACHE_UNDERRUN) {
                LOGI("cache has filled up (%.2f secs), resuming.",
                     cachedDurationUs / 1E6);
                mFlags &= ~CACHE_UNDERRUN;
                play_l();
                notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
            } else if (mFlags & PREPARING) {
                LOGV("cache has filled up (%.2f secs), prepare is done",
                     cachedDurationUs / 1E6);
                finishAsyncPrepare_l();
            }
        }
    }

    postBufferingEvent_l();
}

void AwesomePlayer::partial_reset_l() {
    // Only reset the video renderer and shut down the video decoder.
    // Then instantiate a new video decoder and resume video playback.
    // Used on http-live bandwidth switches (see onStreamDone).

    mVideoRenderer.clear();

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    CHECK_EQ((status_t)OK,
             initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
}

// Handles end-of-stream for either track: restarts the decoder on http-live
// discontinuities, reports errors, and once BOTH tracks are done either
// loops (LOOPING/AUTO_LOOPING) or signals MEDIA_PLAYBACK_COMPLETE.
void AwesomePlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.

    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus == INFO_DISCONTINUITY) {
        // This special status is returned because an http live stream's
        // video stream switched to a different bandwidth at this point
        // and future data may have been encoded using different parameters.
        // This requires us to shutdown the video decoder and reinstantiate
        // a fresh one.

        LOGV("INFO_DISCONTINUITY");

        CHECK(mVideoSource != NULL);

        partial_reset_l();
        postVideoEvent_l();
        return;
    } else if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
        return;
    }

    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & (LOOPING | AUTO_LOOPING)) {
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
    }
}

status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

// Starts (or resumes) playback: prepares if needed, spins up the
// AudioPlayer, picks the clock source (audio clock if available, otherwise
// the system clock) and kicks off the video event loop.
status_t AwesomePlayer::play_l() {
    if (mFlags & PLAYING) {
        return OK;
    }

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mDecryptHandle != NULL) {
        int64_t position;
        getPosition(&position);
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::START, position / 1000);
    }

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {
                mAudioPlayer = new AudioPlayer(mAudioSink, this);
                mAudioPlayer->setSource(mAudioSource);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    // Roll back to the non-playing state on failure.
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);

                    if (mDecryptHandle != NULL) {
                        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                                Playback::STOP, 0);
                    }

                    return err;
                }

                mTimeSource = mAudioPlayer;

                deferredAudioSeek = true;

                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mAudioPlayer->resume();
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();

        if (mAudioSource != NULL && mVideoSource != NULL) {
            postVideoLagEvent_l();
        }
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}

// Reports the video's presentation size to the listener, preferring the
// container's display size over the decoded size, honoring the crop rect
// and swapping width/height for 90/270-degree rotations.
void AwesomePlayer::notifyVideoSize_l() {
    sp<MetaData> meta = mVideoSource->getFormat();

    int32_t cropLeft, cropTop, cropRight, cropBottom;
    if (!meta->findRect(
                kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
        int32_t width, height;
        CHECK(meta->findInt32(kKeyWidth, &width));
        CHECK(meta->findInt32(kKeyHeight, &height));

        cropLeft = cropTop = 0;
        cropRight = width - 1;
        cropBottom = height - 1;

        LOGV("got dimensions only %d x %d", width, height);
    } else {
        LOGV("got crop rect %d, %d, %d, %d",
             cropLeft, cropTop, cropRight, cropBottom);
    }

    int32_t usableWidth = cropRight - cropLeft + 1;
    int32_t usableHeight = cropBottom - cropTop + 1;
    if (mDisplayWidth != 0) {
        usableWidth = mDisplayWidth;
    }
    if (mDisplayHeight != 0) {
        usableHeight = mDisplayHeight;
    }

    int32_t rotationDegrees;
    if (!mVideoTrack->getFormat()->findInt32(
                kKeyRotation, &rotationDegrees)) {
        rotationDegrees = 0;
    }

    if (rotationDegrees == 90 || rotationDegrees == 270) {
        notifyListener_l(
                MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
    } else {
        notifyListener_l(
                MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
    }
}

// (Re)creates the video renderer for the current surface: a native-window
// renderer for hardware (OMX.*) decoders, a software renderer otherwise.
void AwesomePlayer::initRenderer_l() {
    if (mSurface == NULL) {
        return;
    }

    sp<MetaData> meta = mVideoSource->getFormat();

    int32_t format;
    const char *component;
    int32_t decodedWidth, decodedHeight;
    CHECK(meta->findInt32(kKeyColorFormat, &format));
    CHECK(meta->findCString(kKeyDecoderComponent, &component));
    CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
    CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

    int32_t rotationDegrees;
    if (!mVideoTrack->getFormat()->findInt32(
                kKeyRotation, &rotationDegrees)) {
        rotationDegrees = 0;
    }

    mVideoRenderer.clear();

    // Must ensure that mVideoRenderer's destructor is actually executed
    // before creating a new one.
    IPCThreadState::self()->flushCommands();

    if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
        // Hardware decoders avoid the CPU color conversion by decoding
        // directly to ANativeBuffers, so we must use a renderer that
        // just pushes those buffers to the ANativeWindow.
        mVideoRenderer =
            new AwesomeNativeWindowRenderer(mSurface, rotationDegrees);
    } else {
        // Other decoders are instantiated locally and as a consequence
        // allocate their buffers in local address space. This renderer
        // then performs a color conversion and copy to get the data
        // into the ANativeBuffer.
        mVideoRenderer = new AwesomeLocalRenderer(mSurface, meta);
    }
}

status_t AwesomePlayer::pause() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return pause_l();
}

// Pauses playback.  Buffering updates keep running so the cache can refill.
// at_eos selects whether the audio player drains its queued samples first.
status_t AwesomePlayer::pause_l(bool at_eos) {
    if (!(mFlags & PLAYING)) {
        return OK;
    }

    cancelPlayerEvents(true /* keepBufferingGoing */);

    if (mAudioPlayer != NULL) {
        if (at_eos) {
            // If we played the audio stream to completion we
            // want to make sure that all samples remaining in the audio
            // track's queue are played out.
            mAudioPlayer->pause(true /* playPendingSamples */);
        } else {
            mAudioPlayer->pause();
        }
    }

    mFlags &= ~PLAYING;

    if (mDecryptHandle != NULL) {
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::PAUSE, 0);
    }

    return OK;
}

// CACHE_UNDERRUN counts as "playing" — the pause is internal, not requested.
bool AwesomePlayer::isPlaying() const {
    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}

void AwesomePlayer::setSurface(const sp<Surface> &surface) {
    Mutex::Autolock autoLock(mLock);

    mSurface = surface;
}

void AwesomePlayer::setAudioSink(
        const sp<MediaPlayerBase::AudioSink> &audioSink) {
    Mutex::Autolock autoLock(mLock);

    mAudioSink = audioSink;
}

status_t AwesomePlayer::setLooping(bool shouldLoop) {
    Mutex::Autolock autoLock(mLock);

    mFlags = mFlags & ~LOOPING;

    if (shouldLoop) {
        mFlags |= LOOPING;
    }

    return OK;
}

status_t AwesomePlayer::getDuration(int64_t *durationUs) {
    Mutex::Autolock autoLock(mMiscStateLock);

    if (mDurationUs < 0) {
        return UNKNOWN_ERROR;
    }

    *durationUs = mDurationUs;

    return OK;
}

// Current position preference order: RTSP play time, pending seek target,
// last rendered video frame time, audio clock, else 0.
status_t AwesomePlayer::getPosition(int64_t *positionUs) {
    if (mRTSPController != NULL) {
        *positionUs = mRTSPController->getNormalPlayTimeUs();
    }
    else if (mSeeking) {
        *positionUs = mSeekTimeUs;
    } else if (mVideoSource != NULL) {
        Mutex::Autolock autoLock(mMiscStateLock);
        *positionUs = mVideoTimeUs;
    } else if (mAudioPlayer != NULL) {
        *positionUs = mAudioPlayer->getMediaTimeUs();
    } else {
        *positionUs = 0;
    }

    return OK;
}

// Seek requests on non-seekable extractors are silently ignored (OK).
status_t AwesomePlayer::seekTo(int64_t timeUs) {
    if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}

// static
void AwesomePlayer::OnRTSPSeekDoneWrapper(void *cookie) {
    static_cast<AwesomePlayer *>(cookie)->onRTSPSeekDone();
}

void AwesomePlayer::onRTSPSeekDone() {
    notifyListener_l(MEDIA_SEEK_COMPLETE);
    mSeekNotificationSent = true;
}

// Records the seek request; the actual source seek happens either in
// seekAudioIfNecessary_l (audio-only) or on the next video read.
status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
    if (mRTSPController != NULL) {
        mRTSPController->seekAsync(timeUs, OnRTSPSeekDoneWrapper, this);
        return OK;
    }

    if (mFlags & CACHE_UNDERRUN) {
        mFlags &= ~CACHE_UNDERRUN;
        play_l();
    }

    mSeeking = true;
    mSeekNotificationSent = false;
    mSeekTimeUs = timeUs;
    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);

    seekAudioIfNecessary_l();

    if (!(mFlags & PLAYING)) {
        LOGV("seeking while paused, sending SEEK_COMPLETE notification"
             " immediately.");

        notifyListener_l(MEDIA_SEEK_COMPLETE);
        mSeekNotificationSent = true;
    }

    return OK;
}

// Applies a pending seek directly to the audio player, but only for
// audio-only playback (with video, the seek is driven by the video path).
void AwesomePlayer::seekAudioIfNecessary_l() {
    if (mSeeking && mVideoSource == NULL && mAudioPlayer != NULL) {
        mAudioPlayer->seekTo(mSeekTimeUs);

        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
        mSeekNotificationSent = false;

        if (mDecryptHandle != NULL) {
            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                    Playback::PAUSE, 0);
            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                    Playback::START, mSeekTimeUs / 1000);
        }
    }
}

void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mAudioTrack = source;
}

// Instantiates the audio decoder (raw PCM passes straight through), updates
// the overall duration from the track metadata and starts the source.
status_t AwesomePlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        mAudioSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}

void AwesomePlayer::setVideoSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mVideoTrack = source;
}

// Instantiates the video decoder (passing the surface so hardware decoders
// can allocate native buffers), updates the duration and starts the source.
status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false, // createEncoder
            mVideoTrack,
            NULL, flags, USE_SURFACE_ALLOC ? mSurface : NULL);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}

// Completes a pending seek once video has landed on a frame: re-syncs audio
// to the video time (videoTimeUs < 0 means "video ended, use the requested
// time") and emits MEDIA_SEEK_COMPLETE when no audio player will.
void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
    if (!mSeeking) {
        return;
    }

    if (mAudioPlayer != NULL) {
        LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);

        // If we don't have a video time, seek audio to the originally
        // requested seek time instead.

        mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
        mAudioPlayer->resume();
        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
    } else if (!mSeekNotificationSent) {
        // If we're playing video only, report seek complete now,
        // otherwise audio player will notify us later.
        notifyListener_l(MEDIA_SEEK_COMPLETE);
    }

    mFlags |= FIRST_FRAME;
    mSeeking = false;
    mSeekNotificationSent = false;

    if (mDecryptHandle != NULL) {
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::PAUSE, 0);
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::START, videoTimeUs / 1000);
    }
}

// Main video pump: reads/decodes the next frame, handles seeks, format
// changes and EOS.  (Definition continues beyond this excerpt.)
void AwesomePlayer::onVideoEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mSeeking) {
        if (mVideoBuffer) {
            mVideoBuffer->release();
            mVideoBuffer = NULL;
        }

        if (mCachedSource != NULL && mAudioSource != NULL) {
            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK(mVideoBuffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("VideoSource signalled format change.");

                    notifyVideoSize_l();

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }

                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied
                // to the audio track.
                if (mSeeking) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);

                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    bool wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ?
&mSystemTimeSource : mTimeSource; 1311 1312 if (mFlags & FIRST_FRAME) { 1313 mFlags &= ~FIRST_FRAME; 1314 mSinceLastDropped = 0; 1315 mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs; 1316 } 1317 1318 int64_t realTimeUs, mediaTimeUs; 1319 if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL 1320 && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) { 1321 mTimeSourceDeltaUs = realTimeUs - mediaTimeUs; 1322 } 1323 1324 int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs; 1325 1326 int64_t latenessUs = nowUs - timeUs; 1327 1328 if (wasSeeking) { 1329 // Let's display the first frame after seeking right away. 1330 latenessUs = 0; 1331 } 1332 1333 if (mRTPSession != NULL) { 1334 // We'll completely ignore timestamps for gtalk videochat 1335 // and we'll play incoming video as fast as we get it. 1336 latenessUs = 0; 1337 } 1338 1339 if (latenessUs > 40000) { 1340 // We're more than 40ms late. 1341 LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6); 1342 if ( mSinceLastDropped > FRAME_DROP_FREQ) 1343 { 1344 LOGV("we're late by %lld us (%.2f secs) dropping one after %d frames", latenessUs, latenessUs / 1E6, mSinceLastDropped); 1345 mSinceLastDropped = 0; 1346 mVideoBuffer->release(); 1347 mVideoBuffer = NULL; 1348 1349 postVideoEvent_l(); 1350 return; 1351 } 1352 } 1353 1354 if (latenessUs < -10000) { 1355 // We're more than 10ms early. 
1356 1357 postVideoEvent_l(10000); 1358 return; 1359 } 1360 1361 if (mVideoRendererIsPreview || mVideoRenderer == NULL) { 1362 mVideoRendererIsPreview = false; 1363 1364 initRenderer_l(); 1365 } 1366 1367 if (mVideoRenderer != NULL) { 1368 mSinceLastDropped++; 1369 mVideoRenderer->render(mVideoBuffer); 1370 } 1371 1372 mVideoBuffer->release(); 1373 mVideoBuffer = NULL; 1374 1375 postVideoEvent_l(); 1376} 1377 1378void AwesomePlayer::postVideoEvent_l(int64_t delayUs) { 1379 if (mVideoEventPending) { 1380 return; 1381 } 1382 1383 mVideoEventPending = true; 1384 mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs); 1385} 1386 1387void AwesomePlayer::postStreamDoneEvent_l(status_t status) { 1388 if (mStreamDoneEventPending) { 1389 return; 1390 } 1391 mStreamDoneEventPending = true; 1392 1393 mStreamDoneStatus = status; 1394 mQueue.postEvent(mStreamDoneEvent); 1395} 1396 1397void AwesomePlayer::postBufferingEvent_l() { 1398 if (mBufferingEventPending) { 1399 return; 1400 } 1401 mBufferingEventPending = true; 1402 mQueue.postEventWithDelay(mBufferingEvent, 1000000ll); 1403} 1404 1405void AwesomePlayer::postVideoLagEvent_l() { 1406 if (mVideoLagEventPending) { 1407 return; 1408 } 1409 mVideoLagEventPending = true; 1410 mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll); 1411} 1412 1413void AwesomePlayer::postCheckAudioStatusEvent_l() { 1414 if (mAudioStatusEventPending) { 1415 return; 1416 } 1417 mAudioStatusEventPending = true; 1418 mQueue.postEvent(mCheckAudioStatusEvent); 1419} 1420 1421void AwesomePlayer::onCheckAudioStatus() { 1422 Mutex::Autolock autoLock(mLock); 1423 if (!mAudioStatusEventPending) { 1424 // Event was dispatched and while we were blocking on the mutex, 1425 // has already been cancelled. 
1426 return; 1427 } 1428 1429 mAudioStatusEventPending = false; 1430 1431 if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) { 1432 mWatchForAudioSeekComplete = false; 1433 1434 if (!mSeekNotificationSent) { 1435 notifyListener_l(MEDIA_SEEK_COMPLETE); 1436 mSeekNotificationSent = true; 1437 } 1438 1439 mSeeking = false; 1440 } 1441 1442 status_t finalStatus; 1443 if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) { 1444 mWatchForAudioEOS = false; 1445 mFlags |= AUDIO_AT_EOS; 1446 mFlags |= FIRST_FRAME; 1447 postStreamDoneEvent_l(finalStatus); 1448 } 1449} 1450 1451status_t AwesomePlayer::prepare() { 1452 Mutex::Autolock autoLock(mLock); 1453 return prepare_l(); 1454} 1455 1456status_t AwesomePlayer::prepare_l() { 1457 if (mFlags & PREPARED) { 1458 return OK; 1459 } 1460 1461 if (mFlags & PREPARING) { 1462 return UNKNOWN_ERROR; 1463 } 1464 1465 mIsAsyncPrepare = false; 1466 status_t err = prepareAsync_l(); 1467 1468 if (err != OK) { 1469 return err; 1470 } 1471 1472 while (mFlags & PREPARING) { 1473 mPreparedCondition.wait(mLock); 1474 } 1475 1476 return mPrepareResult; 1477} 1478 1479status_t AwesomePlayer::prepareAsync() { 1480 Mutex::Autolock autoLock(mLock); 1481 1482 if (mFlags & PREPARING) { 1483 return UNKNOWN_ERROR; // async prepare already pending 1484 } 1485 1486 mIsAsyncPrepare = true; 1487 return prepareAsync_l(); 1488} 1489 1490status_t AwesomePlayer::prepareAsync_l() { 1491 if (mFlags & PREPARING) { 1492 return UNKNOWN_ERROR; // async prepare already pending 1493 } 1494 1495 if (!mQueueStarted) { 1496 mQueue.start(); 1497 mQueueStarted = true; 1498 } 1499 1500 mFlags |= PREPARING; 1501 mAsyncPrepareEvent = new AwesomeEvent( 1502 this, &AwesomePlayer::onPrepareAsyncEvent); 1503 1504 mQueue.postEvent(mAsyncPrepareEvent); 1505 1506 return OK; 1507} 1508 1509status_t AwesomePlayer::finishSetDataSource_l() { 1510 sp<DataSource> dataSource; 1511 1512 if (!strncasecmp("http://", mUri.string(), 7)) { 1513 mConnectingDataSource = new 
NuHTTPDataSource; 1514 1515 mLock.unlock(); 1516 status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders); 1517 mLock.lock(); 1518 1519 if (err != OK) { 1520 mConnectingDataSource.clear(); 1521 1522 LOGI("mConnectingDataSource->connect() returned %d", err); 1523 return err; 1524 } 1525 1526#if 0 1527 mCachedSource = new NuCachedSource2( 1528 new ThrottledSource( 1529 mConnectingDataSource, 50 * 1024 /* bytes/sec */)); 1530#else 1531 mCachedSource = new NuCachedSource2(mConnectingDataSource); 1532#endif 1533 mConnectingDataSource.clear(); 1534 1535 dataSource = mCachedSource; 1536 1537 // We're going to prefill the cache before trying to instantiate 1538 // the extractor below, as the latter is an operation that otherwise 1539 // could block on the datasource for a significant amount of time. 1540 // During that time we'd be unable to abort the preparation phase 1541 // without this prefill. 1542 1543 mLock.unlock(); 1544 1545 for (;;) { 1546 status_t finalStatus; 1547 size_t cachedDataRemaining = 1548 mCachedSource->approxDataRemaining(&finalStatus); 1549 1550 if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes 1551 || (mFlags & PREPARE_CANCELLED)) { 1552 break; 1553 } 1554 1555 usleep(200000); 1556 } 1557 1558 mLock.lock(); 1559 1560 if (mFlags & PREPARE_CANCELLED) { 1561 LOGI("Prepare cancelled while waiting for initial cache fill."); 1562 return UNKNOWN_ERROR; 1563 } 1564 } else if (!strncasecmp(mUri.string(), "httplive://", 11)) { 1565 String8 uri("http://"); 1566 uri.append(mUri.string() + 11); 1567 1568 if (mLooper == NULL) { 1569 mLooper = new ALooper; 1570 mLooper->setName("httplive"); 1571 mLooper->start(); 1572 } 1573 1574 mLiveSession = new LiveSession; 1575 mLooper->registerHandler(mLiveSession); 1576 1577 mLiveSession->connect(uri.string()); 1578 dataSource = mLiveSession->getDataSource(); 1579 1580 sp<MediaExtractor> extractor = 1581 MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS); 1582 1583 
static_cast<MPEG2TSExtractor *>(extractor.get()) 1584 ->setLiveSession(mLiveSession); 1585 1586 return setDataSource_l(extractor); 1587 } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) { 1588 if (mLooper == NULL) { 1589 mLooper = new ALooper; 1590 mLooper->setName("gtalk rtp"); 1591 mLooper->start( 1592 false /* runOnCallingThread */, 1593 false /* canCallJava */, 1594 PRIORITY_HIGHEST); 1595 } 1596 1597 const char *startOfCodecString = &mUri.string()[13]; 1598 const char *startOfSlash1 = strchr(startOfCodecString, '/'); 1599 if (startOfSlash1 == NULL) { 1600 return BAD_VALUE; 1601 } 1602 const char *startOfWidthString = &startOfSlash1[1]; 1603 const char *startOfSlash2 = strchr(startOfWidthString, '/'); 1604 if (startOfSlash2 == NULL) { 1605 return BAD_VALUE; 1606 } 1607 const char *startOfHeightString = &startOfSlash2[1]; 1608 1609 String8 codecString(startOfCodecString, startOfSlash1 - startOfCodecString); 1610 String8 widthString(startOfWidthString, startOfSlash2 - startOfWidthString); 1611 String8 heightString(startOfHeightString); 1612 1613#if 0 1614 mRTPPusher = new UDPPusher("/data/misc/rtpout.bin", 5434); 1615 mLooper->registerHandler(mRTPPusher); 1616 1617 mRTCPPusher = new UDPPusher("/data/misc/rtcpout.bin", 5435); 1618 mLooper->registerHandler(mRTCPPusher); 1619#endif 1620 1621 mRTPSession = new ARTPSession; 1622 mLooper->registerHandler(mRTPSession); 1623 1624#if 0 1625 // My AMR SDP 1626 static const char *raw = 1627 "v=0\r\n" 1628 "o=- 64 233572944 IN IP4 127.0.0.0\r\n" 1629 "s=QuickTime\r\n" 1630 "t=0 0\r\n" 1631 "a=range:npt=0-315\r\n" 1632 "a=isma-compliance:2,2.0,2\r\n" 1633 "m=audio 5434 RTP/AVP 97\r\n" 1634 "c=IN IP4 127.0.0.1\r\n" 1635 "b=AS:30\r\n" 1636 "a=rtpmap:97 AMR/8000/1\r\n" 1637 "a=fmtp:97 octet-align\r\n"; 1638#elif 1 1639 String8 sdp; 1640 sdp.appendFormat( 1641 "v=0\r\n" 1642 "o=- 64 233572944 IN IP4 127.0.0.0\r\n" 1643 "s=QuickTime\r\n" 1644 "t=0 0\r\n" 1645 "a=range:npt=0-315\r\n" 1646 "a=isma-compliance:2,2.0,2\r\n" 1647 
"m=video 5434 RTP/AVP 97\r\n" 1648 "c=IN IP4 127.0.0.1\r\n" 1649 "b=AS:30\r\n" 1650 "a=rtpmap:97 %s/90000\r\n" 1651 "a=cliprect:0,0,%s,%s\r\n" 1652 "a=framesize:97 %s-%s\r\n", 1653 1654 codecString.string(), 1655 heightString.string(), widthString.string(), 1656 widthString.string(), heightString.string() 1657 ); 1658 const char *raw = sdp.string(); 1659 1660#endif 1661 1662 sp<ASessionDescription> desc = new ASessionDescription; 1663 CHECK(desc->setTo(raw, strlen(raw))); 1664 1665 CHECK_EQ(mRTPSession->setup(desc), (status_t)OK); 1666 1667 if (mRTPPusher != NULL) { 1668 mRTPPusher->start(); 1669 } 1670 1671 if (mRTCPPusher != NULL) { 1672 mRTCPPusher->start(); 1673 } 1674 1675 CHECK_EQ(mRTPSession->countTracks(), 1u); 1676 sp<MediaSource> source = mRTPSession->trackAt(0); 1677 1678#if 0 1679 bool eos; 1680 while (((APacketSource *)source.get()) 1681 ->getQueuedDuration(&eos) < 5000000ll && !eos) { 1682 usleep(100000ll); 1683 } 1684#endif 1685 1686 const char *mime; 1687 CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime)); 1688 1689 if (!strncasecmp("video/", mime, 6)) { 1690 setVideoSource(source); 1691 } else { 1692 CHECK(!strncasecmp("audio/", mime, 6)); 1693 setAudioSource(source); 1694 } 1695 1696 mExtractorFlags = MediaExtractor::CAN_PAUSE; 1697 1698 return OK; 1699 } else if (!strncasecmp("rtsp://", mUri.string(), 7)) { 1700 if (mLooper == NULL) { 1701 mLooper = new ALooper; 1702 mLooper->setName("rtsp"); 1703 mLooper->start(); 1704 } 1705 mRTSPController = new ARTSPController(mLooper); 1706 status_t err = mRTSPController->connect(mUri.string()); 1707 1708 LOGI("ARTSPController::connect returned %d", err); 1709 1710 if (err != OK) { 1711 mRTSPController.clear(); 1712 return err; 1713 } 1714 1715 sp<MediaExtractor> extractor = mRTSPController.get(); 1716 return setDataSource_l(extractor); 1717 } else { 1718 dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders); 1719 } 1720 1721 if (dataSource == NULL) { 1722 return UNKNOWN_ERROR; 1723 } 
1724 1725 sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource); 1726 1727 if (extractor == NULL) { 1728 return UNKNOWN_ERROR; 1729 } 1730 1731 dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient); 1732 if (mDecryptHandle != NULL) { 1733 if (RightsStatus::RIGHTS_VALID == mDecryptHandle->status) { 1734 if (DecryptApiType::WV_BASED == mDecryptHandle->decryptApiType) { 1735 LOGD("Setting mCachedSource to NULL for WVM\n"); 1736 mCachedSource.clear(); 1737 } 1738 } else { 1739 notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE); 1740 } 1741 } 1742 1743 return setDataSource_l(extractor); 1744} 1745 1746void AwesomePlayer::abortPrepare(status_t err) { 1747 CHECK(err != OK); 1748 1749 if (mIsAsyncPrepare) { 1750 notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); 1751 } 1752 1753 mPrepareResult = err; 1754 mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED); 1755 mAsyncPrepareEvent = NULL; 1756 mPreparedCondition.broadcast(); 1757} 1758 1759// static 1760bool AwesomePlayer::ContinuePreparation(void *cookie) { 1761 AwesomePlayer *me = static_cast<AwesomePlayer *>(cookie); 1762 1763 return (me->mFlags & PREPARE_CANCELLED) == 0; 1764} 1765 1766void AwesomePlayer::onPrepareAsyncEvent() { 1767 Mutex::Autolock autoLock(mLock); 1768 1769 if (mFlags & PREPARE_CANCELLED) { 1770 LOGI("prepare was cancelled before doing anything"); 1771 abortPrepare(UNKNOWN_ERROR); 1772 return; 1773 } 1774 1775 if (mUri.size() > 0) { 1776 status_t err = finishSetDataSource_l(); 1777 1778 if (err != OK) { 1779 abortPrepare(err); 1780 return; 1781 } 1782 } 1783 1784 if (mVideoTrack != NULL && mVideoSource == NULL) { 1785 status_t err = initVideoDecoder(); 1786 1787 if (err != OK) { 1788 abortPrepare(err); 1789 return; 1790 } 1791 } 1792 1793 if (mAudioTrack != NULL && mAudioSource == NULL) { 1794 status_t err = initAudioDecoder(); 1795 1796 if (err != OK) { 1797 abortPrepare(err); 1798 return; 1799 } 1800 } 1801 1802 mFlags |= PREPARING_CONNECTED; 1803 
1804 if (mCachedSource != NULL || mRTSPController != NULL) { 1805 postBufferingEvent_l(); 1806 } else { 1807 finishAsyncPrepare_l(); 1808 } 1809} 1810 1811void AwesomePlayer::finishAsyncPrepare_l() { 1812 if (mIsAsyncPrepare) { 1813 if (mVideoSource == NULL) { 1814 notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0); 1815 } else { 1816 notifyVideoSize_l(); 1817 } 1818 1819 notifyListener_l(MEDIA_PREPARED); 1820 } 1821 1822 mPrepareResult = OK; 1823 mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED); 1824 mFlags |= PREPARED; 1825 mAsyncPrepareEvent = NULL; 1826 mPreparedCondition.broadcast(); 1827} 1828 1829uint32_t AwesomePlayer::flags() const { 1830 return mExtractorFlags; 1831} 1832 1833void AwesomePlayer::postAudioEOS() { 1834 postCheckAudioStatusEvent_l(); 1835} 1836 1837void AwesomePlayer::postAudioSeekComplete() { 1838 postCheckAudioStatusEvent_l(); 1839} 1840 1841} // namespace android 1842