// AwesomePlayer.cpp — revision ac05c317cd818701535c5d72ce90da98c4bae75b
1/* 2 * Copyright (C) 2009 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "AwesomePlayer" 19#include <utils/Log.h> 20 21#include <dlfcn.h> 22 23#include "include/ARTSPController.h" 24#include "include/AwesomePlayer.h" 25#include "include/SoftwareRenderer.h" 26#include "include/NuCachedSource2.h" 27#include "include/ThrottledSource.h" 28#include "include/MPEG2TSExtractor.h" 29 30#include "ARTPSession.h" 31#include "APacketSource.h" 32#include "ASessionDescription.h" 33#include "UDPPusher.h" 34 35#include <binder/IPCThreadState.h> 36#include <media/stagefright/foundation/hexdump.h> 37#include <media/stagefright/foundation/ADebug.h> 38#include <media/stagefright/AudioPlayer.h> 39#include <media/stagefright/DataSource.h> 40#include <media/stagefright/FileSource.h> 41#include <media/stagefright/MediaBuffer.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/MediaExtractor.h> 44#include <media/stagefright/MediaSource.h> 45#include <media/stagefright/MetaData.h> 46#include <media/stagefright/OMXCodec.h> 47 48#include <surfaceflinger/Surface.h> 49 50#include <media/stagefright/foundation/ALooper.h> 51#include <media/stagefright/foundation/AMessage.h> 52#include "include/LiveSession.h" 53 54#define USE_SURFACE_ALLOC 1 55#define FRAME_DROP_FREQ 7 56 57namespace android { 58 59static int64_t kLowWaterMarkUs = 2000000ll; // 2secs 60static int64_t 
kHighWaterMarkUs = 10000000ll; // 10secs 61static const size_t kLowWaterMarkBytes = 40000; 62static const size_t kHighWaterMarkBytes = 200000; 63 64struct AwesomeEvent : public TimedEventQueue::Event { 65 AwesomeEvent( 66 AwesomePlayer *player, 67 void (AwesomePlayer::*method)()) 68 : mPlayer(player), 69 mMethod(method) { 70 } 71 72protected: 73 virtual ~AwesomeEvent() {} 74 75 virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) { 76 (mPlayer->*mMethod)(); 77 } 78 79private: 80 AwesomePlayer *mPlayer; 81 void (AwesomePlayer::*mMethod)(); 82 83 AwesomeEvent(const AwesomeEvent &); 84 AwesomeEvent &operator=(const AwesomeEvent &); 85}; 86 87struct AwesomeLocalRenderer : public AwesomeRenderer { 88 AwesomeLocalRenderer( 89 const sp<Surface> &surface, const sp<MetaData> &meta) 90 : mTarget(new SoftwareRenderer(surface, meta)) { 91 } 92 93 virtual void render(MediaBuffer *buffer) { 94 render((const uint8_t *)buffer->data() + buffer->range_offset(), 95 buffer->range_length()); 96 } 97 98 void render(const void *data, size_t size) { 99 mTarget->render(data, size, NULL); 100 } 101 102protected: 103 virtual ~AwesomeLocalRenderer() { 104 delete mTarget; 105 mTarget = NULL; 106 } 107 108private: 109 SoftwareRenderer *mTarget; 110 111 AwesomeLocalRenderer(const AwesomeLocalRenderer &); 112 AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);; 113}; 114 115struct AwesomeNativeWindowRenderer : public AwesomeRenderer { 116 AwesomeNativeWindowRenderer( 117 const sp<ANativeWindow> &nativeWindow, 118 int32_t rotationDegrees) 119 : mNativeWindow(nativeWindow) { 120 applyRotation(rotationDegrees); 121 } 122 123 virtual void render(MediaBuffer *buffer) { 124 status_t err = mNativeWindow->queueBuffer( 125 mNativeWindow.get(), buffer->graphicBuffer().get()); 126 if (err != 0) { 127 LOGE("queueBuffer failed with error %s (%d)", strerror(-err), 128 -err); 129 return; 130 } 131 132 sp<MetaData> metaData = buffer->meta_data(); 133 metaData->setInt32(kKeyRendered, 1); 134 
} 135 136protected: 137 virtual ~AwesomeNativeWindowRenderer() {} 138 139private: 140 sp<ANativeWindow> mNativeWindow; 141 142 void applyRotation(int32_t rotationDegrees) { 143 uint32_t transform; 144 switch (rotationDegrees) { 145 case 0: transform = 0; break; 146 case 90: transform = HAL_TRANSFORM_ROT_90; break; 147 case 180: transform = HAL_TRANSFORM_ROT_180; break; 148 case 270: transform = HAL_TRANSFORM_ROT_270; break; 149 default: transform = 0; break; 150 } 151 152 if (transform) { 153 CHECK_EQ(0, native_window_set_buffers_transform( 154 mNativeWindow.get(), transform)); 155 } 156 } 157 158 AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &); 159 AwesomeNativeWindowRenderer &operator=( 160 const AwesomeNativeWindowRenderer &); 161}; 162 163//////////////////////////////////////////////////////////////////////////////// 164 165AwesomePlayer::AwesomePlayer() 166 : mQueueStarted(false), 167 mTimeSource(NULL), 168 mVideoRendererIsPreview(false), 169 mAudioPlayer(NULL), 170 mDisplayWidth(0), 171 mDisplayHeight(0), 172 mFlags(0), 173 mExtractorFlags(0), 174 mVideoBuffer(NULL), 175 mDecryptHandle(NULL) { 176 CHECK_EQ(mClient.connect(), (status_t)OK); 177 178 DataSource::RegisterDefaultSniffers(); 179 180 mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent); 181 mVideoEventPending = false; 182 mStreamDoneEvent = new AwesomeEvent(this, &AwesomePlayer::onStreamDone); 183 mStreamDoneEventPending = false; 184 mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate); 185 mBufferingEventPending = false; 186 mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate); 187 mVideoEventPending = false; 188 189 mCheckAudioStatusEvent = new AwesomeEvent( 190 this, &AwesomePlayer::onCheckAudioStatus); 191 192 mAudioStatusEventPending = false; 193 194 reset(); 195} 196 197AwesomePlayer::~AwesomePlayer() { 198 if (mQueueStarted) { 199 mQueue.stop(); 200 } 201 202 reset(); 203 204 mClient.disconnect(); 205} 206 207void 
// Cancels the player's pending timed events; mVideoLagEventPending etc. are
// cleared so any already-fired event that grabs the lock later becomes a
// no-op. Buffering updates may optionally be kept alive (used by pause_l).
AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;
    mQueue.cancelEvent(mVideoLagEvent->eventID());
    mVideoLagEventPending = false;

    if (!keepBufferingGoing) {
        mQueue.cancelEvent(mBufferingEvent->eventID());
        mBufferingEventPending = false;
    }
}

// Registers the (weak) listener that receives MEDIA_* notifications.
void AwesomePlayer::setListener(const wp<MediaPlayerBase> &listener) {
    Mutex::Autolock autoLock(mLock);
    mListener = listener;
}

// Public entry point: records a URI data source (work deferred to prepare).
status_t AwesomePlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t AwesomePlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (!strncmp("http://", uri, 7)) {
        // Hack to support http live.
        // Rewrites the scheme to httplive:// when the URI looks like an
        // m3u8 playlist, so finishSetDataSource_l picks the LiveSession
        // path instead of plain http.

        size_t len = strlen(uri);
        if (!strcasecmp(&uri[len - 5], ".m3u8")
                || strstr(&uri[7], "m3u8") != NULL) {
            mUri = "httplive://";
            mUri.append(&uri[7]);
        }
    }

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.

    return OK;
}

// File-descriptor variant: wraps the fd in a FileSource and validates it
// before handing off to the DataSource overload.
status_t AwesomePlayer::setDataSource(
        int fd, int64_t offset, int64_t length) {
    Mutex::Autolock autoLock(mLock);

    reset_l();

    sp<DataSource> dataSource = new FileSource(fd, offset, length);

    status_t err = dataSource->initCheck();

    if (err != OK) {
        return err;
    }

    mFileSource = dataSource;

    return setDataSource_l(dataSource);
}

// IStreamSource playback is not supported by this player.
status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source) {
    return INVALID_OPERATION;
}

status_t AwesomePlayer::setDataSource_l(
        const sp<DataSource> &dataSource) {
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);

    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    // DRM-protected content: fetch the decrypt handle and notify the app
    // immediately if the rights are not (or no longer) valid.
    dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
    if (mDecryptHandle != NULL
            && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
    }

    return setDataSource_l(extractor);
}

// Selects the first audio and first video track of the extractor and
// records overall stream properties (bitrate, display size, auto-loop).
status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    // Attempt to approximate overall stream bitrate by summing all
    // tracks' individual bitrates, if not all of them advertise bitrate,
    // we have to fail.

    int64_t totalBitRate = 0;

    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        int32_t bitrate;
        if (!meta->findInt32(kKeyBitRate, &bitrate)) {
            // -1 flags "bitrate unknown" for getBitrate().
            totalBitRate = -1;
            break;
        }

        totalBitRate += bitrate;
    }

    mBitrate = totalBitRate;

    LOGV("mBitrate = %lld bits/sec", mBitrate);

    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;

            // Set the presentation/display size
            int32_t displayWidth, displayHeight;
            bool success = meta->findInt32(kKeyDisplayWidth, &displayWidth);
            if (success) {
                success = meta->findInt32(kKeyDisplayHeight, &displayHeight);
            }
            if (success) {
                mDisplayWidth = displayWidth;
                mDisplayHeight = displayHeight;
            }

        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();

    return OK;
}

void AwesomePlayer::reset() {
    LOGI("reset");

    Mutex::Autolock autoLock(mLock);
    reset_l();
}

// Tears down everything back to the freshly-constructed state: DRM session,
// in-flight prepare, events, sources, players, renderers and network
// sessions. Teardown order below is deliberate — do not reorder.
void AwesomePlayer::reset_l() {
    LOGI("reset_l");
    mDisplayWidth = 0;
    mDisplayHeight = 0;

    if (mDecryptHandle != NULL) {
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::STOP, 0);
        mDrmManagerClient->closeDecryptSession(mDecryptHandle);
        mDecryptHandle = NULL;
        mDrmManagerClient = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
        if (mConnectingDataSource != NULL) {
            LOGI("interrupting the connection process");
            mConnectingDataSource->disconnect();
        }

        if (mFlags & PREPARING_CONNECTED) {
            // We are basically done preparing, we're just buffering
            // enough data to start playback, we can safely interrupt that.
            finishAsyncPrepare_l();
        }
    }

    if (mFlags & PREPARING) {
        LOGI("waiting until preparation is completes.");
    }

    // Block until the async prepare thread has acknowledged the cancel.
    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();

    mCachedSource.clear();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned
    // If we did this later, audio would continue playing while we
    // shutdown the video-related resources and the player appear to
    // not be as responsive to a reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    LOGI("audio source cleared");

    mTimeSource = NULL;

    delete mAudioPlayer;
    mAudioPlayer = NULL;

    mVideoRenderer.clear();

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mRTSPController != NULL) {
        mRTSPController->disconnect();
        mRTSPController.clear();
    }

    if (mLiveSession != NULL) {
        mLiveSession->disconnect();
        mLiveSession.clear();
    }

    mRTPPusher.clear();
    mRTCPPusher.clear();
    mRTPSession.clear();

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    LOGI("video source cleared");

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = false;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    mBitrate = -1;

    LOGI("reset_l completed");
}

// Forwards an event to the listener if it is still alive (weak ref).
void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
    if (mListener != NULL) {
        sp<MediaPlayerBase> listener = mListener.promote();

        if (listener != NULL) {
            listener->sendEvent(msg, ext1, ext2);
        }
    }
}

// Estimates the stream bitrate: from cached-source size / duration when
// possible, else from the extractor-advertised per-track sum (mBitrate).
// Returns false when neither is available.
bool AwesomePlayer::getBitrate(int64_t *bitrate) {
    off64_t size;
    if (mDurationUs >= 0 && mCachedSource != NULL
            && mCachedSource->getSize(&size) == OK) {
        *bitrate = size * 8000000ll / mDurationUs;  // in bits/sec
        return true;
    }

    if (mBitrate >= 0) {
        *bitrate = mBitrate;
        return true;
    }

    *bitrate = 0;

    return false;
}

// Returns true iff cached duration is available/applicable.
bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
    int64_t bitrate;

    if (mRTSPController != NULL) {
        // RTSP keeps its own packet queue; ask it directly.
        *durationUs = mRTSPController->getQueueDurationUs(eos);
        return true;
    } else if (mCachedSource != NULL && getBitrate(&bitrate)) {
        // Convert remaining cached bytes into playback time at the
        // estimated bitrate.
        status_t finalStatus;
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
        *durationUs = cachedDataRemaining * 8000000ll / bitrate;
        *eos = (finalStatus != OK);
        return true;
    }

    return false;
}

// Kicks the cache back into fetching if it had paused itself.
void AwesomePlayer::ensureCacheIsFetching_l() {
    if (mCachedSource != NULL) {
        mCachedSource->resumeFetchingIfNecessary();
    }
}

// Periodic check (self-reposting) that reports when video rendering falls
// significantly behind the audio clock.
void AwesomePlayer::onVideoLagUpdate() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoLagEventPending) {
        return;
    }
    mVideoLagEventPending = false;

    int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs();
    int64_t videoLateByUs = audioTimeUs - mVideoTimeUs;

    if (videoLateByUs > 300000ll) {
        LOGV("video late by %lld ms.", videoLateByUs / 1000ll);

        notifyListener_l(
                MEDIA_INFO,
                MEDIA_INFO_VIDEO_TRACK_LAGGING,
                videoLateByUs / 1000ll);
    }

    postVideoLagEvent_l();
}

// Periodic (self-reposting) cache watermark check: emits buffering-percentage
// updates, pauses playback on cache underrun and resumes once refilled.
// Uses duration-based watermarks when a bitrate estimate exists, byte-based
// watermarks otherwise.
void AwesomePlayer::onBufferingUpdate() {
    Mutex::Autolock autoLock(mLock);
    if (!mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = false;

    if (mCachedSource != NULL) {
        status_t finalStatus;
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
        bool eos = (finalStatus != OK);

        if (eos) {
            if (finalStatus == ERROR_END_OF_STREAM) {
                notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
            }
            if (mFlags & PREPARING) {
                LOGV("cache has reached EOS, prepare is done.");
                finishAsyncPrepare_l();
            }
        } else {
            int64_t bitrate;
            if (getBitrate(&bitrate)) {
                size_t cachedSize = mCachedSource->cachedSize();
                int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;

                int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
                if (percentage > 100) {
                    percentage = 100;
                }

                notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
            } else {
                // We don't know the bitrate of the stream, use absolute size
                // limits to maintain the cache.

                if ((mFlags & PLAYING) && !eos
                        && (cachedDataRemaining < kLowWaterMarkBytes)) {
                    LOGI("cache is running low (< %d) , pausing.",
                         kLowWaterMarkBytes);
                    mFlags |= CACHE_UNDERRUN;
                    pause_l();
                    ensureCacheIsFetching_l();
                    notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
                } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
                    if (mFlags & CACHE_UNDERRUN) {
                        LOGI("cache has filled up (> %d), resuming.",
                             kHighWaterMarkBytes);
                        mFlags &= ~CACHE_UNDERRUN;
                        play_l();
                        notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
                    } else if (mFlags & PREPARING) {
                        LOGV("cache has filled up (> %d), prepare is done",
                             kHighWaterMarkBytes);
                        finishAsyncPrepare_l();
                    }
                }
            }
        }
    }

    // Duration-based watermarks (also covers the RTSP case).
    int64_t cachedDurationUs;
    bool eos;
    if (getCachedDuration_l(&cachedDurationUs, &eos)) {
        LOGV("cachedDurationUs = %.2f secs, eos=%d",
             cachedDurationUs / 1E6, eos);

        if ((mFlags & PLAYING) && !eos
                && (cachedDurationUs < kLowWaterMarkUs)) {
            LOGI("cache is running low (%.2f secs) , pausing.",
                 cachedDurationUs / 1E6);
            mFlags |= CACHE_UNDERRUN;
            pause_l();
            ensureCacheIsFetching_l();
            notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
            if (mFlags & CACHE_UNDERRUN) {
                LOGI("cache has filled up (%.2f secs), resuming.",
                     cachedDurationUs / 1E6);
                mFlags &= ~CACHE_UNDERRUN;
                play_l();
                notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
            } else if (mFlags & PREPARING) {
                LOGV("cache has filled up (%.2f secs), prepare is done",
                     cachedDurationUs / 1E6);
                finishAsyncPrepare_l();
            }
        }
    }

    postBufferingEvent_l();
}

// Restarts only the video decoder (used on http-live bandwidth switches);
// audio and all session state are left untouched.
void AwesomePlayer::partial_reset_l() {
    // Only reset the video renderer and shut down the video decoder.
    // Then instantiate a new video decoder and resume video playback.

    mVideoRenderer.clear();

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    CHECK_EQ((status_t)OK,
             initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
}

// Handles end-of-stream for either track; mStreamDoneStatus is presumably
// set by postStreamDoneEvent_l (posted elsewhere in this file).
void AwesomePlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.

    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus == INFO_DISCONTINUITY) {
        // This special status is returned because an http live stream's
        // video stream switched to a different bandwidth at this point
        // and future data may have been encoded using different parameters.
        // This requires us to shutdown the video decoder and reinstantiate
        // a fresh one.
        LOGV("INFO_DISCONTINUITY");

        CHECK(mVideoSource != NULL);

        partial_reset_l();
        postVideoEvent_l();
        return;
    } else if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        // A genuine error (not plain EOS): report it and stop at EOS.
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
        return;
    }

    // Normal EOS: only complete once BOTH existing tracks have finished.
    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & (LOOPING | AUTO_LOOPING)) {
        // Loop: rewind and restart video pumping if there is video.
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
    }
}

status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);

    // An explicit play() overrides any cache-underrun auto-pause.
    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

// Starts (or resumes) playback: prepares if needed, spins up the
// AudioPlayer, chooses the time source and kicks off the video event pump.
status_t AwesomePlayer::play_l() {
    if (mFlags & PLAYING) {
        return OK;
    }

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mDecryptHandle != NULL) {
        int64_t position;
        getPosition(&position);
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::START, position / 1000);
    }

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {
                mAudioPlayer = new AudioPlayer(mAudioSink, this);
                mAudioPlayer->setSource(mAudioSource);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);

                    if (mDecryptHandle != NULL) {
                        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                                Playback::STOP, 0);
                    }

                    return err;
                }

                // Audio clock drives A/V sync whenever audio exists.
                mTimeSource = mAudioPlayer;

                deferredAudioSeek = true;

                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mAudioPlayer->resume();
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        // Video-only content: fall back to the system clock.
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();

        if (mAudioSource != NULL && mVideoSource != NULL) {
            postVideoLagEvent_l();
        }
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}

// Reports the video's presentation size to the listener, honoring crop
// rectangle, container display size and rotation metadata.
void AwesomePlayer::notifyVideoSize_l() {
    sp<MetaData> meta = mVideoSource->getFormat();

    int32_t cropLeft, cropTop, cropRight, cropBottom;
    if (!meta->findRect(
                kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
        int32_t width, height;
        CHECK(meta->findInt32(kKeyWidth, &width));
        CHECK(meta->findInt32(kKeyHeight, &height));

        cropLeft = cropTop = 0;
        cropRight = width - 1;
        cropBottom = height - 1;

        LOGV("got dimensions only %d x %d", width, height);
    } else {
        LOGV("got crop rect %d, %d, %d, %d",
             cropLeft, cropTop, cropRight, cropBottom);
    }

    int32_t usableWidth = cropRight - cropLeft + 1;
    int32_t usableHeight = cropBottom - cropTop + 1;
    if (mDisplayWidth != 0) {
        usableWidth = mDisplayWidth;
    }
    if (mDisplayHeight != 0) {
        usableHeight = mDisplayHeight;
    }

    int32_t rotationDegrees;
    if (!mVideoTrack->getFormat()->findInt32(
                kKeyRotation, &rotationDegrees)) {
        rotationDegrees = 0;
    }

    if (rotationDegrees == 90 || rotationDegrees == 270) {
        // Width/height are swapped for sideways rotations.
        notifyListener_l(
                MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
    } else {
        notifyListener_l(
                MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
    }
}

// Picks a renderer for the current decoder: native-window for OMX
// (hardware) decoders, software color-converting renderer otherwise.
void AwesomePlayer::initRenderer_l() {
    if (mSurface == NULL) {
        return;
    }

    sp<MetaData> meta = mVideoSource->getFormat();

    int32_t format;
    const char *component;
    int32_t decodedWidth, decodedHeight;
    CHECK(meta->findInt32(kKeyColorFormat, &format));
    CHECK(meta->findCString(kKeyDecoderComponent, &component));
    CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
    CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

    int32_t rotationDegrees;
    if (!mVideoTrack->getFormat()->findInt32(
                kKeyRotation, &rotationDegrees)) {
        rotationDegrees = 0;
    }

    mVideoRenderer.clear();

    // Must ensure that
    // mVideoRenderer's destructor is actually executed
    // before creating a new one.
    IPCThreadState::self()->flushCommands();

    if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
        // Hardware decoders avoid the CPU color conversion by decoding
        // directly to ANativeBuffers, so we must use a renderer that
        // just pushes those buffers to the ANativeWindow.
        mVideoRenderer =
            new AwesomeNativeWindowRenderer(mSurface, rotationDegrees);
    } else {
        // Other decoders are instantiated locally and as a consequence
        // allocate their buffers in local address space. This renderer
        // then performs a color conversion and copy to get the data
        // into the ANativeBuffer.
        mVideoRenderer = new AwesomeLocalRenderer(mSurface, meta);
    }
}

status_t AwesomePlayer::pause() {
    Mutex::Autolock autoLock(mLock);

    // A user-initiated pause clears any cache-underrun auto-pause state.
    mFlags &= ~CACHE_UNDERRUN;

    return pause_l();
}

// Internal pause: cancels player events (buffering updates keep running),
// pauses audio and informs the DRM agent.
status_t AwesomePlayer::pause_l(bool at_eos) {
    if (!(mFlags & PLAYING)) {
        return OK;
    }

    cancelPlayerEvents(true /* keepBufferingGoing */);

    if (mAudioPlayer != NULL) {
        if (at_eos) {
            // If we played the audio stream to completion we
            // want to make sure that all samples remaining in the audio
            // track's queue are played out.
            mAudioPlayer->pause(true /* playPendingSamples */);
        } else {
            mAudioPlayer->pause();
        }
    }

    mFlags &= ~PLAYING;

    if (mDecryptHandle != NULL) {
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::PAUSE, 0);
    }

    return OK;
}

// Note: a cache-underrun auto-pause still reports as "playing".
bool AwesomePlayer::isPlaying() const {
    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}

void AwesomePlayer::setSurface(const sp<Surface> &surface) {
    Mutex::Autolock autoLock(mLock);

    mSurface = surface;
}

void AwesomePlayer::setAudioSink(
        const sp<MediaPlayerBase::AudioSink> &audioSink) {
    Mutex::Autolock autoLock(mLock);

    mAudioSink = audioSink;
}

status_t AwesomePlayer::setLooping(bool shouldLoop) {
    Mutex::Autolock autoLock(mLock);

    mFlags = mFlags & ~LOOPING;

    if (shouldLoop) {
        mFlags |= LOOPING;
    }

    return OK;
}

status_t AwesomePlayer::getDuration(int64_t *durationUs) {
    Mutex::Autolock autoLock(mMiscStateLock);

    // -1 means "duration unknown" (set in reset_l).
    if (mDurationUs < 0) {
        return UNKNOWN_ERROR;
    }

    *durationUs = mDurationUs;

    return OK;
}

// Position preference order: RTSP play time > pending seek target >
// last rendered video time > audio clock > 0.
status_t AwesomePlayer::getPosition(int64_t *positionUs) {
    if (mRTSPController != NULL) {
        *positionUs = mRTSPController->getNormalPlayTimeUs();
    }
    else if (mSeeking) {
        *positionUs = mSeekTimeUs;
    } else if (mVideoSource != NULL) {
        Mutex::Autolock autoLock(mMiscStateLock);
        *positionUs = mVideoTimeUs;
    } else if (mAudioPlayer != NULL) {
        *positionUs = mAudioPlayer->getMediaTimeUs();
    } else {
        *positionUs = 0;
    }

    return OK;
}

// Silently ignores the request (returns OK) if the extractor can't seek.
status_t AwesomePlayer::seekTo(int64_t timeUs) {
    if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}

// static
// C-style trampoline handed to ARTSPController::seekAsync.
void AwesomePlayer::OnRTSPSeekDoneWrapper(void *cookie) {
    static_cast<AwesomePlayer *>(cookie)->onRTSPSeekDone();
}

// Called from the RTSP controller's thread when an async seek completes.
void AwesomePlayer::onRTSPSeekDone() {
    notifyListener_l(MEDIA_SEEK_COMPLETE);
    mSeekNotificationSent = true;
}

// Records a pending seek; actual repositioning happens on the next video
// read (or immediately for RTSP / paused-audio-only playback).
status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
    if (mRTSPController != NULL) {
        mRTSPController->seekAsync(timeUs, OnRTSPSeekDoneWrapper, this);
        return OK;
    }

    if (mFlags & CACHE_UNDERRUN) {
        // Seeking cancels the underrun auto-pause.
        mFlags &= ~CACHE_UNDERRUN;
        play_l();
    }

    mSeeking = true;
    mSeekNotificationSent = false;
    mSeekTimeUs = timeUs;
    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);

    seekAudioIfNecessary_l();

    if (!(mFlags & PLAYING)) {
        LOGV("seeking while paused, sending SEEK_COMPLETE notification"
             " immediately.");

        notifyListener_l(MEDIA_SEEK_COMPLETE);
        mSeekNotificationSent = true;
    }

    return OK;
}

// Seeks the audio player directly for audio-only content (with video the
// audio is synced to the first video frame in finishSeekIfNecessary).
void AwesomePlayer::seekAudioIfNecessary_l() {
    if (mSeeking && mVideoSource == NULL && mAudioPlayer != NULL) {
        mAudioPlayer->seekTo(mSeekTimeUs);

        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
        mSeekNotificationSent = false;

        if (mDecryptHandle != NULL) {
            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                    Playback::PAUSE, 0);
            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                    Playback::START, mSeekTimeUs / 1000);
        }
    }
}

void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mAudioTrack = source;
}

// Instantiates the audio decoder (or passes raw PCM through), starts it,
// and folds the track duration into mDurationUs.
status_t AwesomePlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        // Raw PCM needs no decoder.
        mAudioSource = mAudioTrack;
    } else {
        mAudioSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}

void AwesomePlayer::setVideoSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mVideoTrack = source;
}

// Instantiates and starts the video decoder; the surface is handed to
// OMXCodec for direct-to-native-window allocation when enabled.
status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false, // createEncoder
            mVideoTrack,
            NULL, flags, USE_SURFACE_ALLOC ? mSurface : NULL);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}

// Completes a pending seek once the first post-seek video frame is known
// (videoTimeUs < 0 means video ended before producing one).
void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
    if (!mSeeking) {
        return;
    }

    if (mAudioPlayer != NULL) {
        LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);

        // If we don't have a video time, seek audio to the originally
        // requested seek time instead.

        mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
        mAudioPlayer->resume();
        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
    } else if (!mSeekNotificationSent) {
        // If we're playing video only, report seek complete now,
        // otherwise audio player will notify us later.
        notifyListener_l(MEDIA_SEEK_COMPLETE);
    }

    // Force re-anchoring of the time source mapping on the next video frame.
    mFlags |= FIRST_FRAME;
    mSeeking = false;
    mSeekNotificationSent = false;

    if (mDecryptHandle != NULL) {
        // Inform the DRM agent of the position change (milliseconds).
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::PAUSE, 0);
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::START, videoTimeUs / 1000);
    }
}

// Timed event driving video playback: fetches the next decoded frame,
// applies any pending seek, drops or delays frames to stay in sync with the
// audio (or system) clock and hands the frame to the renderer, then
// reschedules itself.
void AwesomePlayer::onVideoEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mSeeking) {
        if (mVideoBuffer) {
            // Discard the stale pre-seek frame.
            mVideoBuffer->release();
            mVideoBuffer = NULL;
        }

        if (mCachedSource != NULL && mAudioSource != NULL) {
            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            // The seek request only applies to the first read after it was
            // issued; subsequent reads continue sequentially.
            options.clearSeekTo();

            if (err != OK) {
                CHECK(mVideoBuffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("VideoSource signalled format change.");

                    notifyVideoSize_l();

                    if (mVideoRenderer != NULL) {
                        // The renderer depends on frame geometry; rebuild it
                        // for the new format.
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }

                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied
                // to the audio track.
                if (mSeeking) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);

                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    bool wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    // Once audio reaches EOS its clock stops advancing; fall back to the
    // system clock in that case.
    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ?
        &mSystemTimeSource : mTimeSource;

    if (mFlags & FIRST_FRAME) {
        // Anchor media time to the time source on the first frame (seeks set
        // FIRST_FRAME again to force re-anchoring).
        mFlags &= ~FIRST_FRAME;
        mSinceLastDropped = 0;
        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
    }

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        // While audio is running it is the master clock.
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

    // Positive lateness means the frame should already have been displayed.
    int64_t latenessUs = nowUs - timeUs;

    if (wasSeeking) {
        // Let's display the first frame after seeking right away.
        latenessUs = 0;
    }

    if (mRTPSession != NULL) {
        // We'll completely ignore timestamps for gtalk videochat
        // and we'll play incoming video as fast as we get it.
        latenessUs = 0;
    }

    if (latenessUs > 40000) {
        // We're more than 40ms late.
        LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6);
        // Drop at most one frame per FRAME_DROP_FREQ rendered frames so a
        // persistently late stream still shows some video.
        if ( mSinceLastDropped > FRAME_DROP_FREQ)
        {
            LOGV("we're late by %lld us (%.2f secs) dropping one after %d frames", latenessUs, latenessUs / 1E6, mSinceLastDropped);
            mSinceLastDropped = 0;
            mVideoBuffer->release();
            mVideoBuffer = NULL;

            postVideoEvent_l();
            return;
        }
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.

        postVideoEvent_l(10000);
        return;
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        initRenderer_l();
    }

    if (mVideoRenderer != NULL) {
        mSinceLastDropped++;
        mVideoRenderer->render(mVideoBuffer);
    }

    mVideoBuffer->release();
    mVideoBuffer = NULL;

    // Schedule decode/display of the next frame.
    postVideoEvent_l();
}

// Schedules the next video event |delayUs| microseconds from now (a negative
// delay means the 10ms default).  No-op if one is already pending.  Caller
// must hold mLock.
void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (mVideoEventPending) {
        return;
    }

    mVideoEventPending = true;
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
}

// Queues a one-shot "stream done" event carrying |status| (ERROR_END_OF_STREAM
// or a real error).  Caller must hold mLock.
void AwesomePlayer::postStreamDoneEvent_l(status_t status) {
    if (mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = true;

    mStreamDoneStatus = status;
    mQueue.postEvent(mStreamDoneEvent);
}

// Schedules the periodic (1 sec) buffering/cache check.  Caller must hold
// mLock.
void AwesomePlayer::postBufferingEvent_l() {
    if (mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = true;
    mQueue.postEventWithDelay(mBufferingEvent, 1000000ll);
}

// Schedules the periodic (1 sec) video-lag check.  Caller must hold mLock.
void AwesomePlayer::postVideoLagEvent_l() {
    if (mVideoLagEventPending) {
        return;
    }
    mVideoLagEventPending = true;
    mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll);
}

// Queues an immediate check of the audio player's seek/EOS state.  Caller
// must hold mLock.
void AwesomePlayer::postCheckAudioStatusEvent_l() {
    if (mAudioStatusEventPending) {
        return;
    }
    mAudioStatusEventPending = true;
    mQueue.postEvent(mCheckAudioStatusEvent);
}

// Handles the audio status event: completes audio-driven seeks and detects
// audio end-of-stream.
void AwesomePlayer::onCheckAudioStatus() {
    Mutex::Autolock autoLock(mLock);
    if (!mAudioStatusEventPending) {
        // Event was dispatched and while we were blocking on the mutex,
        // has already been cancelled.
        return;
    }

    mAudioStatusEventPending = false;

    if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
        mWatchForAudioSeekComplete = false;

        if (!mSeekNotificationSent) {
            notifyListener_l(MEDIA_SEEK_COMPLETE);
            mSeekNotificationSent = true;
        }

        mSeeking = false;
    }

    status_t finalStatus;
    if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
        mWatchForAudioEOS = false;
        mFlags |= AUDIO_AT_EOS;
        // Re-anchor the clock: video may continue against the system time
        // source now that the audio clock has stopped.
        mFlags |= FIRST_FRAME;
        postStreamDoneEvent_l(finalStatus);
    }
}

// Synchronous prepare entry point: grabs the lock and delegates.
status_t AwesomePlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

// Synchronous prepare with mLock held: kicks off the async machinery and
// blocks on mPreparedCondition until it finishes, returning its result.
status_t AwesomePlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

// Client-visible asynchronous prepare entry point.
status_t AwesomePlayer::prepareAsync() {
    Mutex::Autolock autoLock(mLock);

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    mIsAsyncPrepare = true;
    return prepareAsync_l();
}

// Queues the actual preparation work onto the event queue.  Caller must hold
// mLock.
status_t AwesomePlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new AwesomeEvent(
            this, &AwesomePlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

// Resolves mUri into a DataSource / MediaExtractor, with dedicated handling
// for the supported schemes: http (cached), httplive, gtalk rtp, rtsp and
// anything DataSource::CreateFromURI understands (e.g. local files).
status_t AwesomePlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;

    if (!strncasecmp("http://", mUri.string(), 7)) {
        mConnectingDataSource = new NuHTTPDataSource;

        // Connecting may block for a long time; drop the lock meanwhile so
        // the player stays responsive (e.g. to abort requests).
        mLock.unlock();
        status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
        mLock.lock();

        if (err != OK) {
            mConnectingDataSource.clear();

            LOGI("mConnectingDataSource->connect() returned %d", err);
            return err;
        }

#if 0
        mCachedSource = new NuCachedSource2(
                new ThrottledSource(
                    mConnectingDataSource, 50 * 1024 /* bytes/sec */));
#else
        mCachedSource = new NuCachedSource2(mConnectingDataSource);
#endif
        mConnectingDataSource.clear();

        dataSource = mCachedSource;

        // We're going to prefill the cache before trying to instantiate
        // the extractor below, as the latter is an operation that otherwise
        // could block on the datasource for a significant amount of time.
        // During that time we'd be unable to abort the preparation phase
        // without this prefill.

        mLock.unlock();

        // NOTE(review): mFlags is polled below without mLock held - looks
        // racy; confirm whether PREPARE_CANCELLED updates are visible here.
        for (;;) {
            status_t finalStatus;
            size_t cachedDataRemaining =
                mCachedSource->approxDataRemaining(&finalStatus);

            if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes
                    || (mFlags & PREPARE_CANCELLED)) {
                break;
            }

            usleep(200000);
        }

        mLock.lock();

        if (mFlags & PREPARE_CANCELLED) {
            LOGI("Prepare cancelled while waiting for initial cache fill.");
            return UNKNOWN_ERROR;
        }
    } else if (!strncasecmp(mUri.string(), "httplive://", 11)) {
        // HTTP live streaming: rewrite to plain http and drive it through a
        // LiveSession feeding an MPEG2-TS extractor.
        String8 uri("http://");
        uri.append(mUri.string() + 11);

        if (mLooper == NULL) {
            mLooper = new ALooper;
            mLooper->setName("httplive");
            mLooper->start();
        }

        mLiveSession = new LiveSession;
        mLooper->registerHandler(mLiveSession);

        mLiveSession->connect(uri.string());
        dataSource = mLiveSession->getDataSource();

        sp<MediaExtractor> extractor =
            MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

        // Create() above was given an explicit MPEG2-TS container mime, so
        // the downcast is safe.
        static_cast<MPEG2TSExtractor *>(extractor.get())
            ->setLiveSession(mLiveSession);

        return setDataSource_l(extractor);
    } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) {
        // gtalk videochat: the uri encodes "rtsp://gtalk/<codec>/<w>/<h>".
        if (mLooper == NULL) {
            mLooper = new ALooper;
            mLooper->setName("gtalk rtp");
            mLooper->start(
                    false /* runOnCallingThread */,
                    false /* canCallJava */,
                    PRIORITY_HIGHEST);
        }

        const char *startOfCodecString = &mUri.string()[13];
        const char *startOfSlash1 = strchr(startOfCodecString, '/');
        if (startOfSlash1 == NULL) {
            return BAD_VALUE;
        }
        const char *startOfWidthString = &startOfSlash1[1];
        const char *startOfSlash2 = strchr(startOfWidthString, '/');
        if (startOfSlash2 == NULL) {
            return BAD_VALUE;
        }
        const char *startOfHeightString = &startOfSlash2[1];

        String8 codecString(startOfCodecString, startOfSlash1 - startOfCodecString);
        String8 widthString(startOfWidthString, startOfSlash2 - startOfWidthString);
        String8 heightString(startOfHeightString);

#if 0
        // Debug aid: replay captured RTP/RTCP traffic from files.
        mRTPPusher = new UDPPusher("/data/misc/rtpout.bin", 5434);
        mLooper->registerHandler(mRTPPusher);

        mRTCPPusher = new UDPPusher("/data/misc/rtcpout.bin", 5435);
        mLooper->registerHandler(mRTCPPusher);
#endif

        mRTPSession = new ARTPSession;
        mLooper->registerHandler(mRTPSession);

#if 0
        // My AMR SDP
        static const char *raw =
            "v=0\r\n"
            "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
            "s=QuickTime\r\n"
            "t=0 0\r\n"
            "a=range:npt=0-315\r\n"
            "a=isma-compliance:2,2.0,2\r\n"
            "m=audio 5434 RTP/AVP 97\r\n"
            "c=IN IP4 127.0.0.1\r\n"
            "b=AS:30\r\n"
            "a=rtpmap:97 AMR/8000/1\r\n"
            "a=fmtp:97 octet-align\r\n";
#elif 1
        // Synthesize a minimal video SDP from the codec and frame size
        // parsed out of the uri above.
        String8 sdp;
        sdp.appendFormat(
                "v=0\r\n"
                "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
                "s=QuickTime\r\n"
                "t=0 0\r\n"
                "a=range:npt=0-315\r\n"
                "a=isma-compliance:2,2.0,2\r\n"
                "m=video 5434 RTP/AVP 97\r\n"
                "c=IN IP4 127.0.0.1\r\n"
                "b=AS:30\r\n"
                "a=rtpmap:97 %s/90000\r\n"
                "a=cliprect:0,0,%s,%s\r\n"
                "a=framesize:97 %s-%s\r\n",

                codecString.string(),
                heightString.string(), widthString.string(),
                widthString.string(), heightString.string()
                );
        const char *raw = sdp.string();

#endif

        sp<ASessionDescription> desc = new ASessionDescription;
        CHECK(desc->setTo(raw, strlen(raw)));

        CHECK_EQ(mRTPSession->setup(desc), (status_t)OK);

        if (mRTPPusher != NULL) {
            mRTPPusher->start();
        }

        if (mRTCPPusher != NULL) {
            mRTCPPusher->start();
        }

        CHECK_EQ(mRTPSession->countTracks(), 1u);
        sp<MediaSource> source = mRTPSession->trackAt(0);

#if 0
        bool eos;
        while (((APacketSource *)source.get())
                ->getQueuedDuration(&eos) < 5000000ll && !eos) {
            usleep(100000ll);
        }
#endif

        const char *mime;
        CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("video/", mime, 6)) {
            setVideoSource(source);
        } else {
            CHECK(!strncasecmp("audio/", mime, 6));
            setAudioSource(source);
        }

        mExtractorFlags = MediaExtractor::CAN_PAUSE;

        return OK;
    } else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
        if (mLooper == NULL) {
            mLooper = new ALooper;
            mLooper->setName("rtsp");
            mLooper->start();
        }
        mRTSPController = new ARTSPController(mLooper);
        status_t err = mRTSPController->connect(mUri.string());

        LOGI("ARTSPController::connect returned %d", err);

        if (err != OK) {
            mRTSPController.clear();
            return err;
        }

        // The RTSP controller doubles as the extractor.
        sp<MediaExtractor> extractor = mRTSPController.get();
        return setDataSource_l(extractor);
    } else {
        // Local files and any other scheme DataSource understands.
        dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
    }

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);

    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
    if (mDecryptHandle != NULL) {
        if (RightsStatus::RIGHTS_VALID == mDecryptHandle->status) {
            if (DecryptApiType::WV_BASED == mDecryptHandle->decryptApiType) {
                // Widevine handles its own caching.
                LOGD("Setting mCachedSource to NULL for WVM\n");
                mCachedSource.clear();
            }
        } else {
            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
        }
    }

    return setDataSource_l(extractor);
}

// Finishes a failed prepare: reports the error to the listener (async case),
// records the result and wakes any thread blocked in prepare_l().
void AwesomePlayer::abortPrepare(status_t err) {
    CHECK(err != OK);

    if (mIsAsyncPrepare) {
        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
    }

    mPrepareResult = err;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

// static
// Callback handed to long-running preparation steps; returns false once the
// prepare has been cancelled so they can bail out early.
bool AwesomePlayer::ContinuePreparation(void *cookie) {
    AwesomePlayer *me = static_cast<AwesomePlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

// Event-queue half of prepareAsync: resolves the data source and creates the
// video/audio decoders, aborting if cancelled or on any error.
void AwesomePlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);

    if (mFlags & PREPARE_CANCELLED) {
        LOGI("prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    mFlags |= PREPARING_CONNECTED;

1805 if (mCachedSource != NULL || mRTSPController != NULL) { 1806 postBufferingEvent_l(); 1807 } else { 1808 finishAsyncPrepare_l(); 1809 } 1810} 1811 1812void AwesomePlayer::finishAsyncPrepare_l() { 1813 if (mIsAsyncPrepare) { 1814 if (mVideoSource == NULL) { 1815 notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0); 1816 } else { 1817 notifyVideoSize_l(); 1818 } 1819 1820 notifyListener_l(MEDIA_PREPARED); 1821 } 1822 1823 mPrepareResult = OK; 1824 mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED); 1825 mFlags |= PREPARED; 1826 mAsyncPrepareEvent = NULL; 1827 mPreparedCondition.broadcast(); 1828} 1829 1830uint32_t AwesomePlayer::flags() const { 1831 return mExtractorFlags; 1832} 1833 1834void AwesomePlayer::postAudioEOS() { 1835 postCheckAudioStatusEvent_l(); 1836} 1837 1838void AwesomePlayer::postAudioSeekComplete() { 1839 postCheckAudioStatusEvent_l(); 1840} 1841 1842} // namespace android 1843