// AwesomePlayer.cpp revision 511ea9823b1ab1d45eb86607cb291878c70b26ae
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "AwesomePlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "include/AwesomePlayer.h"
#include "include/LiveSource.h"
#include "include/SoftwareRenderer.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"

#include "ARTPSession.h"
#include "APacketSource.h"
#include "ASessionDescription.h"
#include "UDPPusher.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>

#include <media/stagefright/foundation/ALooper.h>

namespace android {

// Buffered-duration watermarks used by onBufferingUpdate() to decide when to
// pause (cache running low) and resume (cache refilled) streamed playback.
static int64_t kLowWaterMarkUs = 2000000ll;  // 2secs
static int64_t kHighWaterMarkUs = 10000000ll;  // 10secs

// Small adapter that dispatches a TimedEventQueue event to a member function
// of AwesomePlayer. Non-copyable; holds a raw back-pointer to the player,
// which owns all of its events.
struct AwesomeEvent : public TimedEventQueue::Event {
    AwesomeEvent(
            AwesomePlayer *player,
            void (AwesomePlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~AwesomeEvent() {}

    // Invoked by the event queue thread; forwards to the bound member.
    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    AwesomePlayer *mPlayer;
    void (AwesomePlayer::*mMethod)();

    AwesomeEvent(const AwesomeEvent &);
    AwesomeEvent &operator=(const AwesomeEvent &);
};

// Renderer wrapper for OMX components whose buffers live on the media_server
// side: rendering is done remotely via an IOMXRenderer, identified by the
// buffer id stashed in the buffer's metadata.
struct AwesomeRemoteRenderer : public AwesomeRenderer {
    AwesomeRemoteRenderer(const sp<IOMXRenderer> &target)
        : mTarget(target) {
    }

    virtual void render(MediaBuffer *buffer) {
        void *id;
        if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) {
            mTarget->render((IOMX::buffer_id)id);
        }
    }

private:
    sp<IOMXRenderer> mTarget;

    AwesomeRemoteRenderer(const AwesomeRemoteRenderer &);
    AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &);
};

// Renderer for locally-decoded buffers. Tries to load a hardware-specific
// renderer from libstagefrighthw.so and falls back to the software
// color-converting renderer (see init() below).
struct AwesomeLocalRenderer : public AwesomeRenderer {
    AwesomeLocalRenderer(
            bool previewOnly,
            const char *componentName,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<ISurface> &isurface,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight)
        : mTarget(NULL),
          mLibHandle(NULL) {
            init(previewOnly, componentName,
                 colorFormat, isurface, surface, displayWidth,
                 displayHeight, decodedWidth, decodedHeight);
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }

protected:
    virtual ~AwesomeLocalRenderer() {
        delete mTarget;
        mTarget = NULL;

        // Only unload the hw renderer library after the renderer it created
        // has been destroyed.
        if (mLibHandle) {
            dlclose(mLibHandle);
            mLibHandle = NULL;
        }
    }

private:
    VideoRenderer *mTarget;
    void *mLibHandle;

    void init(
            bool previewOnly,
            const char *componentName,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<ISurface> &isurface,
            const
sp<Surface> &surface, 144 size_t displayWidth, size_t displayHeight, 145 size_t decodedWidth, size_t decodedHeight); 146 147 AwesomeLocalRenderer(const AwesomeLocalRenderer &); 148 AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);; 149}; 150 151void AwesomeLocalRenderer::init( 152 bool previewOnly, 153 const char *componentName, 154 OMX_COLOR_FORMATTYPE colorFormat, 155 const sp<ISurface> &isurface, 156 const sp<Surface> &surface, 157 size_t displayWidth, size_t displayHeight, 158 size_t decodedWidth, size_t decodedHeight) { 159 if (!previewOnly) { 160 // We will stick to the vanilla software-color-converting renderer 161 // for "previewOnly" mode, to avoid unneccessarily switching overlays 162 // more often than necessary. 163 164 mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW); 165 166 if (mLibHandle) { 167 typedef VideoRenderer *(*CreateRendererFunc)( 168 const sp<ISurface> &surface, 169 const char *componentName, 170 OMX_COLOR_FORMATTYPE colorFormat, 171 size_t displayWidth, size_t displayHeight, 172 size_t decodedWidth, size_t decodedHeight); 173 174 CreateRendererFunc func = 175 (CreateRendererFunc)dlsym( 176 mLibHandle, 177 "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20" 178 "OMX_COLOR_FORMATTYPEjjjj"); 179 180 if (func) { 181 mTarget = 182 (*func)(isurface, componentName, colorFormat, 183 displayWidth, displayHeight, 184 decodedWidth, decodedHeight); 185 } 186 } 187 } 188 189 if (mTarget == NULL) { 190 mTarget = new SoftwareRenderer( 191 colorFormat, surface, displayWidth, displayHeight, 192 decodedWidth, decodedHeight); 193 } 194} 195 196AwesomePlayer::AwesomePlayer() 197 : mQueueStarted(false), 198 mTimeSource(NULL), 199 mVideoRendererIsPreview(false), 200 mAudioPlayer(NULL), 201 mFlags(0), 202 mExtractorFlags(0), 203 mLastVideoBuffer(NULL), 204 mVideoBuffer(NULL), 205 mSuspensionState(NULL) { 206 CHECK_EQ(mClient.connect(), OK); 207 208 DataSource::RegisterDefaultSniffers(); 209 210 mVideoEvent = new AwesomeEvent(this, 
&AwesomePlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new AwesomeEvent(this, &AwesomePlayer::onStreamDone);
    mStreamDoneEventPending = false;
    mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate);
    mBufferingEventPending = false;

    mCheckAudioStatusEvent = new AwesomeEvent(
            this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    reset();
}

AwesomePlayer::~AwesomePlayer() {
    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    mClient.disconnect();
}

// Cancels all pending player events. When keepBufferingGoing is true the
// periodic buffering event survives (used by pause_l() so cache updates keep
// flowing while paused).
void AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    if (!keepBufferingGoing) {
        mQueue.cancelEvent(mBufferingEvent->eventID());
        mBufferingEventPending = false;
    }
}

void AwesomePlayer::setListener(const wp<MediaPlayerBase> &listener) {
    Mutex::Autolock autoLock(mLock);
    mListener = listener;
}

status_t AwesomePlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

// Records the uri/headers only; called with mLock held (see setDataSource()).
status_t AwesomePlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.

    return OK;
}

// File-descriptor variant; wraps the fd in a FileSource. The #if 0 block
// below is leftover developer test code for HTTP live streams.
status_t AwesomePlayer::setDataSource(
        int fd, int64_t offset, int64_t length) {
#if 0
    // return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/sl.m3u8");
    return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/0440.m3u8");
    // return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/0640.m3u8");
    // return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/1240_vod.m3u8");
    // return setDataSource("httplive://iphoned5.akamai.com.edgesuite.net/mhbarron/nasatv/nasatv_96.m3u8");
    // return setDataSource("httplive://iphoned5.akamai.com.edgesuite.net/mhbarron/nasatv/nasatv_1500.m3u8");
    // return setDataSource("httplive://iphone.video.hsn.com/iPhone_high.m3u8");
    // return setDataSource("httplive://iphoned5.akamai.com.edgesuite.net/mhbarron/iphonewebcast/webcast090209_all/webcast090209_all.m3u8");
    // return setDataSource("httplive://qthttp.akamai.com.edgesuite.net/iphone_demo/Video_Content/usat/tt_062209_iphone/hi/prog_index.m3u8");
    // return setDataSource("httplive://qthttp.akamai.com.edgesuite.net/iphone_demo/Video_Content/usat/tt_googmaps/hi/prog_index.m3u8");
    // return setDataSource("httplive://qthttp.akamai.com.edgesuite.net/iphone_demo/Video_Content/mtv/ni_spo_25a_rt74137_clip_syn/hi/prog_index.m3u8");
#endif

    Mutex::Autolock autoLock(mLock);

    reset_l();

    sp<DataSource> dataSource = new FileSource(fd, offset, length);

    status_t err = dataSource->initCheck();

    if (err != OK) {
        return err;
    }

    mFileSource = dataSource;

    return setDataSource_l(dataSource);
}

// Creates an extractor for the given data source and hands off to the
// extractor-based overload.
status_t AwesomePlayer::setDataSource_l(
        const sp<DataSource> &dataSource) {
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);

    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    return setDataSource_l(extractor);
}

status_t
AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    // Select the first video track and first audio track found, by MIME
    // prefix. Fails if neither is present.
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();

    return OK;
}

void AwesomePlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

// Tears the player down to its pristine state; called with mLock held.
// Interrupts an in-flight async prepare (and any connecting HTTP data
// source) and waits for it to finish before dismantling anything.
void AwesomePlayer::reset_l() {
    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
        if (mConnectingDataSource != NULL) {
            LOGI("interrupting the connection process");
            mConnectingDataSource->disconnect();
        }
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();

    mCachedSource.clear();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the respone to the reset request
    // appears to happen instantaneously as far as the user is concerned
    // If we did this later, audio would continue playing while we
    // shutdown the video-related resources and the player appear to
    // not be as responsive to a reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    delete mAudioPlayer;
    mAudioPlayer = NULL;

    mVideoRenderer.clear();

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mRTSPController != NULL) {
        mRTSPController->disconnect();
        mRTSPController.clear();
    }

    mRTPPusher.clear();
    mRTCPPusher.clear();
    mRTPSession.clear();

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    // Reset all bookkeeping back to its initial values.
    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = false;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;
}

// Forwards an event to the registered MediaPlayerBase listener, if it is
// still alive (mListener is a weak reference).
void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
    if (mListener != NULL) {
        sp<MediaPlayerBase> listener = mListener.promote();

        if (listener != NULL) {
            listener->sendEvent(msg, ext1, ext2);
        }
    }
}

// Returns true iff cached duration is available/applicable.
bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
    off_t totalSize;

    if (mRTSPController != NULL) {
        // RTSP: the controller knows its queued duration directly.
        *durationUs = mRTSPController->getQueueDurationUs(eos);
        return true;
    } else if (mCachedSource != NULL && mDurationUs >= 0
            && mCachedSource->getSize(&totalSize) == OK) {
        // HTTP: estimate the average bitrate from total size and duration,
        // then convert the remaining cached bytes into microseconds.
        int64_t bitrate = totalSize * 8000000ll / mDurationUs;  // in bits/sec

        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(eos);
        *durationUs = cachedDataRemaining * 8000000ll / bitrate;
        return true;
    }

    return false;
}

// Periodic (1s) buffering housekeeping: reports buffering progress to the
// listener and pauses/resumes playback around the cache watermarks.
void AwesomePlayer::onBufferingUpdate() {
    Mutex::Autolock autoLock(mLock);
    if (!mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = false;

    if (mCachedSource != NULL) {
        bool eos;
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&eos);

        if (eos) {
            notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
        } else {
            off_t size;
            if (mDurationUs >= 0 && mCachedSource->getSize(&size) == OK) {
                // Known bitrate: report percentage of duration cached.
                int64_t bitrate = size * 8000000ll / mDurationUs;  // in bits/sec

                size_t cachedSize = mCachedSource->cachedSize();
                int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;

                int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
                if (percentage > 100) {
                    percentage = 100;
                }

                notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
            } else {
                // We don't know the bitrate of the stream, use absolute size
                // limits to maintain the cache.

                const size_t kLowWaterMarkBytes = 400000;
                const size_t kHighWaterMarkBytes = 1000000;

                if ((mFlags & PLAYING) && !eos
                        && (cachedDataRemaining < kLowWaterMarkBytes)) {
                    LOGI("cache is running low (< %d) , pausing.",
                         kLowWaterMarkBytes);
                    mFlags |= CACHE_UNDERRUN;
                    pause_l();
                    notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
                } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
                    if (mFlags & CACHE_UNDERRUN) {
                        LOGI("cache has filled up (> %d), resuming.",
                             kHighWaterMarkBytes);
                        mFlags &= ~CACHE_UNDERRUN;
                        play_l();
                        notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
                    } else if (mFlags & PREPARING) {
                        LOGV("cache has filled up (> %d), prepare is done",
                             kHighWaterMarkBytes);
                        finishAsyncPrepare_l();
                    }
                }
            }
        }
    }

    // Time-based watermarks (applies when a cached duration is computable,
    // e.g. RTSP or HTTP with known bitrate).
    int64_t cachedDurationUs;
    bool eos;
    if (getCachedDuration_l(&cachedDurationUs, &eos)) {
        if ((mFlags & PLAYING) && !eos
                && (cachedDurationUs < kLowWaterMarkUs)) {
            LOGI("cache is running low (%.2f secs) , pausing.",
                 cachedDurationUs / 1E6);
            mFlags |= CACHE_UNDERRUN;
            pause_l();
            notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
            if (mFlags & CACHE_UNDERRUN) {
                LOGI("cache has filled up (%.2f secs), resuming.",
                     cachedDurationUs / 1E6);
                mFlags &= ~CACHE_UNDERRUN;
                play_l();
                notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
            } else if (mFlags & PREPARING) {
                LOGV("cache has filled up (%.2f secs), prepare is done",
                     cachedDurationUs / 1E6);
                finishAsyncPrepare_l();
            }
        }
    }

    // Re-arm the periodic buffering event.
    postBufferingEvent_l();
}

void AwesomePlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.

    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        // A real error (not a clean EOS): report it and stop.
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l();

        mFlags |= AT_EOS;
        return;
    }

    // Only act once both streams (where present) have reached EOS.
    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & LOOPING) {
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        pause_l();

        mFlags |= AT_EOS;
    }
}

status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

// Starts (or resumes) playback; called with mLock held. Prepares first if
// necessary, creates the AudioPlayer lazily and kicks off the video event.
status_t AwesomePlayer::play_l() {
    if (mFlags & PLAYING) {
        return OK;
    }

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {
                mAudioPlayer = new AudioPlayer(mAudioSink);
                mAudioPlayer->setSource(mAudioSource);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);

                    return err;
                }

                // Audio clock becomes the master time source.
                mTimeSource = mAudioPlayer;

                deferredAudioSeek = true;

                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mAudioPlayer->resume();
        }

        postCheckAudioStatusEvent_l();
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        // No audio: fall back to the system clock.
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}

// Reports the decoder's output dimensions to the listener.
void AwesomePlayer::notifyVideoSize_l() {
    sp<MetaData> meta = mVideoSource->getFormat();

    int32_t decodedWidth, decodedHeight;
    CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
    CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

    notifyListener_l(MEDIA_SET_VIDEO_SIZE, decodedWidth, decodedHeight);
}

// (Re)creates the video renderer matching the current decoder output format.
// No-op unless a surface has been set.
void AwesomePlayer::initRenderer_l() {
    if (mSurface != NULL || mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        mVideoRenderer.clear();

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        if (mSurface != NULL) {
            // Other decoders are instantiated locally and as a consequence
            // allocate their buffers in local address space.
            mVideoRenderer = new AwesomeLocalRenderer(
                false,  // previewOnly
                component,
                (OMX_COLOR_FORMATTYPE)format,
                mISurface,
                mSurface,
                mVideoWidth, mVideoHeight,
                decodedWidth, decodedHeight);
        } else {
            // Our OMX codecs allocate buffers on the media_server side
            // therefore they require a remote IOMXRenderer that knows how
            // to display them.
            mVideoRenderer = new AwesomeRemoteRenderer(
                mClient.interface()->createRenderer(
                        mISurface, component,
                        (OMX_COLOR_FORMATTYPE)format,
                        decodedWidth, decodedHeight,
                        mVideoWidth, mVideoHeight));
        }
    }
}

status_t AwesomePlayer::pause() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return pause_l();
}

// Pauses playback; called with mLock held. Keeps the buffering event alive
// so cache maintenance continues while paused.
status_t AwesomePlayer::pause_l() {
    if (!(mFlags & PLAYING)) {
        return OK;
    }

    cancelPlayerEvents(true /* keepBufferingGoing */);

    if (mAudioPlayer != NULL) {
        mAudioPlayer->pause();
    }

    mFlags &= ~PLAYING;

    return OK;
}

// A cache-underrun pause still counts as "playing" from the app's view.
bool AwesomePlayer::isPlaying() const {
    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}

void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);

    mISurface = isurface;
}

void AwesomePlayer::setSurface(const sp<Surface> &surface) {
    Mutex::Autolock autoLock(mLock);

    mSurface = surface;
}

void AwesomePlayer::setAudioSink(
        const sp<MediaPlayerBase::AudioSink> &audioSink) {
    Mutex::Autolock autoLock(mLock);

    mAudioSink = audioSink;
}

status_t AwesomePlayer::setLooping(bool shouldLoop) {
    Mutex::Autolock autoLock(mLock);

    mFlags = mFlags & ~LOOPING;

    if (shouldLoop) {
        mFlags |= LOOPING;
    }

    return OK;
}

status_t AwesomePlayer::getDuration(int64_t *durationUs) {
    Mutex::Autolock autoLock(mMiscStateLock);

    if (mDurationUs < 0) {
        return UNKNOWN_ERROR;
    }

    *durationUs = mDurationUs;

    return OK;
}

// Reports the current playback position, preferring (in order): the RTSP
// controller's play time, a pending seek target, the last rendered video
// timestamp, then the audio clock.
status_t AwesomePlayer::getPosition(int64_t *positionUs) {
    if (mRTSPController != NULL) {
        *positionUs = mRTSPController->getNormalPlayTimeUs();
    }
    else if (mSeeking) {
        *positionUs = mSeekTimeUs;
    } else if (mVideoSource != NULL) {
        Mutex::Autolock autoLock(mMiscStateLock);
        *positionUs = mVideoTimeUs;
    } else if (mAudioPlayer != NULL) {
        *positionUs = mAudioPlayer->getMediaTimeUs();
    } else {
        *positionUs = 0;
    }

    return OK;
}

// Silently ignores the seek if the extractor can't seek in either direction.
status_t AwesomePlayer::seekTo(int64_t timeUs) {
    if (mExtractorFlags
            & (MediaExtractor::CAN_SEEK_FORWARD
                | MediaExtractor::CAN_SEEK_BACKWARD)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}

// Performs the seek; called with mLock held. For RTSP the controller does
// all the work; otherwise a pending-seek is recorded for the decode loops.
status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
    if (mRTSPController != NULL) {
        mRTSPController->seek(timeUs);

        notifyListener_l(MEDIA_SEEK_COMPLETE);
        mSeekNotificationSent = true;
        return OK;
    }

    if (mFlags & CACHE_UNDERRUN) {
        // Resume first so the seek actually takes effect.
        mFlags &= ~CACHE_UNDERRUN;
        play_l();
    }

    mSeeking = true;
    mSeekNotificationSent = false;
    mSeekTimeUs = timeUs;
    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);

    seekAudioIfNecessary_l();

    if (!(mFlags & PLAYING)) {
        LOGV("seeking while paused, sending SEEK_COMPLETE notification"
             " immediately.");

        notifyListener_l(MEDIA_SEEK_COMPLETE);
        mSeekNotificationSent = true;
    }

    return OK;
}

// Audio-only streams seek the audio player directly; with video present the
// audio is seeked from onVideoEvent() once the video frame is known.
void AwesomePlayer::seekAudioIfNecessary_l() {
    if (mSeeking && mVideoSource == NULL && mAudioPlayer != NULL) {
        mAudioPlayer->seekTo(mSeekTimeUs);

        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
        mSeekNotificationSent =
false;
    }
}

status_t AwesomePlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}

void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mAudioTrack = source;
}

// Instantiates the audio decoder (or passes raw PCM straight through),
// updates the overall duration and starts the source.
status_t AwesomePlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        // Raw PCM needs no decoder.
        mAudioSource = mAudioTrack;
    } else {
        mAudioSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false,  // createEncoder
                mAudioTrack);
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            // Overall duration is the max over all tracks.
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ?
OK : UNKNOWN_ERROR;
}

void AwesomePlayer::setVideoSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mVideoTrack = source;
}

// Instantiates the video decoder, records its dimensions, updates the
// overall duration and starts the source.
status_t AwesomePlayer::initVideoDecoder() {
    uint32_t flags = 0;
    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,  // createEncoder
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            // Overall duration is the max over all tracks.
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}

// Core video pump: reads/decodes one frame, handles pending seeks and format
// changes, syncs against the time source and renders (or drops) the frame,
// then re-posts itself.
void AwesomePlayer::onVideoEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mSeeking) {
        // Drop any buffers belonging to the pre-seek position.
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }

        if (mVideoBuffer) {
            mVideoBuffer->release();
            mVideoBuffer = NULL;
        }

        if (mCachedSource != NULL && mAudioSource != NULL) {
            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
        }
        // Read until we obtain a non-empty buffer or hit an error/EOS.
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("VideoSource signalled format change.");

                    notifyVideoSize_l();

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }

                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    if (mSeeking) {
        if (mAudioPlayer != NULL) {
            // Now that the seeked video timestamp is known, align audio to it.
            LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);

            mAudioPlayer->seekTo(timeUs);
            mAudioPlayer->resume();
            mWatchForAudioSeekComplete = true;
            mWatchForAudioEOS = true;
        } else if (!mSeekNotificationSent) {
            // If we're playing video only, report seek complete now,
            // otherwise audio player will notify us later.
            notifyListener_l(MEDIA_SEEK_COMPLETE);
        }

        mFlags |= FIRST_FRAME;
        mSeeking = false;
        mSeekNotificationSent = false;
    }

    // If audio hit EOS, its clock stops advancing; fall back to system time.
    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if (mFlags & FIRST_FRAME) {
        mFlags &= ~FIRST_FRAME;

        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
    }

    // Prefer the audio player's real-time/media-time mapping when available.
    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

    int64_t latenessUs = nowUs - timeUs;

    if (mRTPSession != NULL) {
        // We'll completely ignore timestamps for gtalk videochat
        // and we'll play incoming video as fast as we get it.
        latenessUs = 0;
    }

    if (latenessUs > 40000) {
        // We're more than 40ms late.
        LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6);

        // Drop the frame and try to catch up with the next one.
        mVideoBuffer->release();
        mVideoBuffer = NULL;

        postVideoEvent_l();
        return;
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.

        postVideoEvent_l(10000);
        return;
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        initRenderer_l();
    }

    if (mVideoRenderer != NULL) {
        mVideoRenderer->render(mVideoBuffer);
    }

    // Keep the just-rendered buffer around (e.g. for redraws) and release
    // the previous one.
    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }
    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    postVideoEvent_l();
}

// Schedules the next video event; delayUs < 0 selects the default 10ms.
void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (mVideoEventPending) {
        return;
    }

    mVideoEventPending = true;
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ?
10000 : delayUs);
}

// Posts the stream-done event carrying the terminating status.
void AwesomePlayer::postStreamDoneEvent_l(status_t status) {
    if (mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = true;

    mStreamDoneStatus = status;
    mQueue.postEvent(mStreamDoneEvent);
}

// Schedules the next buffering check, one second out.
void AwesomePlayer::postBufferingEvent_l() {
    if (mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = true;
    mQueue.postEventWithDelay(mBufferingEvent, 1000000ll);
}

// Schedules the next audio status poll, 100ms out.
void AwesomePlayer::postCheckAudioStatusEvent_l() {
    if (mAudioStatusEventPending) {
        return;
    }
    mAudioStatusEventPending = true;
    mQueue.postEventWithDelay(mCheckAudioStatusEvent, 100000ll);
}

// Periodic poll of the audio player: detects seek completion and audio EOS
// and forwards the corresponding notifications.
void AwesomePlayer::onCheckAudioStatus() {
    Mutex::Autolock autoLock(mLock);
    if (!mAudioStatusEventPending) {
        // Event was dispatched and while we were blocking on the mutex,
        // has already been cancelled.
        return;
    }

    mAudioStatusEventPending = false;

    if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
        mWatchForAudioSeekComplete = false;

        if (!mSeekNotificationSent) {
            notifyListener_l(MEDIA_SEEK_COMPLETE);
            mSeekNotificationSent = true;
        }

        mSeeking = false;
    }

    status_t finalStatus;
    if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
        mWatchForAudioEOS = false;
        mFlags |= AUDIO_AT_EOS;
        mFlags |= FIRST_FRAME;
        postStreamDoneEvent_l(finalStatus);
    }

    postCheckAudioStatusEvent_l();
}

status_t AwesomePlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

// Synchronous prepare; called with mLock held. Implemented as an async
// prepare followed by a wait on the prepared condition.
status_t AwesomePlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while
(mFlags & PREPARING) { 1244 mPreparedCondition.wait(mLock); 1245 } 1246 1247 return mPrepareResult; 1248} 1249 1250status_t AwesomePlayer::prepareAsync() { 1251 Mutex::Autolock autoLock(mLock); 1252 1253 if (mFlags & PREPARING) { 1254 return UNKNOWN_ERROR; // async prepare already pending 1255 } 1256 1257 mIsAsyncPrepare = true; 1258 return prepareAsync_l(); 1259} 1260 1261status_t AwesomePlayer::prepareAsync_l() { 1262 if (mFlags & PREPARING) { 1263 return UNKNOWN_ERROR; // async prepare already pending 1264 } 1265 1266 if (!mQueueStarted) { 1267 mQueue.start(); 1268 mQueueStarted = true; 1269 } 1270 1271 mFlags |= PREPARING; 1272 mAsyncPrepareEvent = new AwesomeEvent( 1273 this, &AwesomePlayer::onPrepareAsyncEvent); 1274 1275 mQueue.postEvent(mAsyncPrepareEvent); 1276 1277 return OK; 1278} 1279 1280status_t AwesomePlayer::finishSetDataSource_l() { 1281 sp<DataSource> dataSource; 1282 1283 if (!strncasecmp("http://", mUri.string(), 7)) { 1284 mConnectingDataSource = new NuHTTPDataSource; 1285 1286 mLock.unlock(); 1287 status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders); 1288 mLock.lock(); 1289 1290 if (err != OK) { 1291 mConnectingDataSource.clear(); 1292 1293 LOGI("mConnectingDataSource->connect() returned %d", err); 1294 return err; 1295 } 1296 1297#if 0 1298 mCachedSource = new NuCachedSource2( 1299 new ThrottledSource( 1300 mConnectingDataSource, 50 * 1024 /* bytes/sec */)); 1301#else 1302 mCachedSource = new NuCachedSource2(mConnectingDataSource); 1303#endif 1304 mConnectingDataSource.clear(); 1305 1306 dataSource = mCachedSource; 1307 } else if (!strncasecmp(mUri.string(), "httplive://", 11)) { 1308 String8 uri("http://"); 1309 uri.append(mUri.string() + 11); 1310 1311 dataSource = new LiveSource(uri.string()); 1312 1313 mCachedSource = new NuCachedSource2(dataSource); 1314 dataSource = mCachedSource; 1315 1316 sp<MediaExtractor> extractor = 1317 MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS); 1318 1319 return 
setDataSource_l(extractor); 1320 } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) { 1321 if (mLooper == NULL) { 1322 mLooper = new ALooper; 1323 mLooper->setName("gtalk rtp"); 1324 mLooper->start( 1325 false /* runOnCallingThread */, 1326 false /* canCallJava */, 1327 PRIORITY_HIGHEST); 1328 } 1329 1330 const char *startOfCodecString = &mUri.string()[13]; 1331 const char *startOfSlash1 = strchr(startOfCodecString, '/'); 1332 if (startOfSlash1 == NULL) { 1333 return BAD_VALUE; 1334 } 1335 const char *startOfWidthString = &startOfSlash1[1]; 1336 const char *startOfSlash2 = strchr(startOfWidthString, '/'); 1337 if (startOfSlash2 == NULL) { 1338 return BAD_VALUE; 1339 } 1340 const char *startOfHeightString = &startOfSlash2[1]; 1341 1342 String8 codecString(startOfCodecString, startOfSlash1 - startOfCodecString); 1343 String8 widthString(startOfWidthString, startOfSlash2 - startOfWidthString); 1344 String8 heightString(startOfHeightString); 1345 1346#if 0 1347 mRTPPusher = new UDPPusher("/data/misc/rtpout.bin", 5434); 1348 mLooper->registerHandler(mRTPPusher); 1349 1350 mRTCPPusher = new UDPPusher("/data/misc/rtcpout.bin", 5435); 1351 mLooper->registerHandler(mRTCPPusher); 1352#endif 1353 1354 mRTPSession = new ARTPSession; 1355 mLooper->registerHandler(mRTPSession); 1356 1357#if 0 1358 // My AMR SDP 1359 static const char *raw = 1360 "v=0\r\n" 1361 "o=- 64 233572944 IN IP4 127.0.0.0\r\n" 1362 "s=QuickTime\r\n" 1363 "t=0 0\r\n" 1364 "a=range:npt=0-315\r\n" 1365 "a=isma-compliance:2,2.0,2\r\n" 1366 "m=audio 5434 RTP/AVP 97\r\n" 1367 "c=IN IP4 127.0.0.1\r\n" 1368 "b=AS:30\r\n" 1369 "a=rtpmap:97 AMR/8000/1\r\n" 1370 "a=fmtp:97 octet-align\r\n"; 1371#elif 1 1372 String8 sdp; 1373 sdp.appendFormat( 1374 "v=0\r\n" 1375 "o=- 64 233572944 IN IP4 127.0.0.0\r\n" 1376 "s=QuickTime\r\n" 1377 "t=0 0\r\n" 1378 "a=range:npt=0-315\r\n" 1379 "a=isma-compliance:2,2.0,2\r\n" 1380 "m=video 5434 RTP/AVP 97\r\n" 1381 "c=IN IP4 127.0.0.1\r\n" 1382 "b=AS:30\r\n" 1383 "a=rtpmap:97 
%s/90000\r\n" 1384 "a=cliprect:0,0,%s,%s\r\n" 1385 "a=framesize:97 %s-%s\r\n", 1386 1387 codecString.string(), 1388 heightString.string(), widthString.string(), 1389 widthString.string(), heightString.string() 1390 ); 1391 const char *raw = sdp.string(); 1392 1393#endif 1394 1395 sp<ASessionDescription> desc = new ASessionDescription; 1396 CHECK(desc->setTo(raw, strlen(raw))); 1397 1398 CHECK_EQ(mRTPSession->setup(desc), (status_t)OK); 1399 1400 if (mRTPPusher != NULL) { 1401 mRTPPusher->start(); 1402 } 1403 1404 if (mRTCPPusher != NULL) { 1405 mRTCPPusher->start(); 1406 } 1407 1408 CHECK_EQ(mRTPSession->countTracks(), 1u); 1409 sp<MediaSource> source = mRTPSession->trackAt(0); 1410 1411#if 0 1412 bool eos; 1413 while (((APacketSource *)source.get()) 1414 ->getQueuedDuration(&eos) < 5000000ll && !eos) { 1415 usleep(100000ll); 1416 } 1417#endif 1418 1419 const char *mime; 1420 CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime)); 1421 1422 if (!strncasecmp("video/", mime, 6)) { 1423 setVideoSource(source); 1424 } else { 1425 CHECK(!strncasecmp("audio/", mime, 6)); 1426 setAudioSource(source); 1427 } 1428 1429 mExtractorFlags = MediaExtractor::CAN_PAUSE; 1430 1431 return OK; 1432 } else if (!strncasecmp("rtsp://", mUri.string(), 7)) { 1433 if (mLooper == NULL) { 1434 mLooper = new ALooper; 1435 mLooper->setName("rtsp"); 1436 mLooper->start(); 1437 } 1438 mRTSPController = new ARTSPController(mLooper); 1439 status_t err = mRTSPController->connect(mUri.string()); 1440 1441 LOGI("ARTSPController::connect returned %d", err); 1442 1443 if (err != OK) { 1444 mRTSPController.clear(); 1445 return err; 1446 } 1447 1448 sp<MediaExtractor> extractor = mRTSPController.get(); 1449 return setDataSource_l(extractor); 1450 } else { 1451 dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders); 1452 } 1453 1454 if (dataSource == NULL) { 1455 return UNKNOWN_ERROR; 1456 } 1457 1458 sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource); 1459 1460 if 
(extractor == NULL) { 1461 return UNKNOWN_ERROR; 1462 } 1463 1464 return setDataSource_l(extractor); 1465} 1466 1467void AwesomePlayer::abortPrepare(status_t err) { 1468 CHECK(err != OK); 1469 1470 if (mIsAsyncPrepare) { 1471 notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); 1472 } 1473 1474 mPrepareResult = err; 1475 mFlags &= ~(PREPARING|PREPARE_CANCELLED); 1476 mAsyncPrepareEvent = NULL; 1477 mPreparedCondition.broadcast(); 1478} 1479 1480// static 1481bool AwesomePlayer::ContinuePreparation(void *cookie) { 1482 AwesomePlayer *me = static_cast<AwesomePlayer *>(cookie); 1483 1484 return (me->mFlags & PREPARE_CANCELLED) == 0; 1485} 1486 1487void AwesomePlayer::onPrepareAsyncEvent() { 1488 Mutex::Autolock autoLock(mLock); 1489 1490 if (mFlags & PREPARE_CANCELLED) { 1491 LOGI("prepare was cancelled before doing anything"); 1492 abortPrepare(UNKNOWN_ERROR); 1493 return; 1494 } 1495 1496 if (mUri.size() > 0) { 1497 status_t err = finishSetDataSource_l(); 1498 1499 if (err != OK) { 1500 abortPrepare(err); 1501 return; 1502 } 1503 } 1504 1505 if (mVideoTrack != NULL && mVideoSource == NULL) { 1506 status_t err = initVideoDecoder(); 1507 1508 if (err != OK) { 1509 abortPrepare(err); 1510 return; 1511 } 1512 } 1513 1514 if (mAudioTrack != NULL && mAudioSource == NULL) { 1515 status_t err = initAudioDecoder(); 1516 1517 if (err != OK) { 1518 abortPrepare(err); 1519 return; 1520 } 1521 } 1522 1523 if (mCachedSource != NULL || mRTSPController != NULL) { 1524 postBufferingEvent_l(); 1525 } else { 1526 finishAsyncPrepare_l(); 1527 } 1528} 1529 1530void AwesomePlayer::finishAsyncPrepare_l() { 1531 if (mIsAsyncPrepare) { 1532 if (mVideoSource == NULL) { 1533 notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0); 1534 } else { 1535 notifyVideoSize_l(); 1536 } 1537 1538 notifyListener_l(MEDIA_PREPARED); 1539 } 1540 1541 mPrepareResult = OK; 1542 mFlags &= ~(PREPARING|PREPARE_CANCELLED); 1543 mFlags |= PREPARED; 1544 mAsyncPrepareEvent = NULL; 1545 mPreparedCondition.broadcast(); 
1546} 1547 1548status_t AwesomePlayer::suspend() { 1549 LOGV("suspend"); 1550 Mutex::Autolock autoLock(mLock); 1551 1552 if (mSuspensionState != NULL) { 1553 if (mLastVideoBuffer == NULL) { 1554 //go into here if video is suspended again 1555 //after resuming without being played between 1556 //them 1557 SuspensionState *state = mSuspensionState; 1558 mSuspensionState = NULL; 1559 reset_l(); 1560 mSuspensionState = state; 1561 return OK; 1562 } 1563 1564 delete mSuspensionState; 1565 mSuspensionState = NULL; 1566 } 1567 1568 if (mFlags & PREPARING) { 1569 mFlags |= PREPARE_CANCELLED; 1570 if (mConnectingDataSource != NULL) { 1571 LOGI("interrupting the connection process"); 1572 mConnectingDataSource->disconnect(); 1573 } 1574 } 1575 1576 while (mFlags & PREPARING) { 1577 mPreparedCondition.wait(mLock); 1578 } 1579 1580 SuspensionState *state = new SuspensionState; 1581 state->mUri = mUri; 1582 state->mUriHeaders = mUriHeaders; 1583 state->mFileSource = mFileSource; 1584 1585 state->mFlags = mFlags & (PLAYING | LOOPING | AT_EOS); 1586 getPosition(&state->mPositionUs); 1587 1588 if (mLastVideoBuffer) { 1589 size_t size = mLastVideoBuffer->range_length(); 1590 if (size) { 1591 state->mLastVideoFrameSize = size; 1592 state->mLastVideoFrame = malloc(size); 1593 memcpy(state->mLastVideoFrame, 1594 (const uint8_t *)mLastVideoBuffer->data() 1595 + mLastVideoBuffer->range_offset(), 1596 size); 1597 1598 state->mVideoWidth = mVideoWidth; 1599 state->mVideoHeight = mVideoHeight; 1600 1601 sp<MetaData> meta = mVideoSource->getFormat(); 1602 CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat)); 1603 CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth)); 1604 CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight)); 1605 } 1606 } 1607 1608 reset_l(); 1609 1610 mSuspensionState = state; 1611 1612 return OK; 1613} 1614 1615status_t AwesomePlayer::resume() { 1616 LOGV("resume"); 1617 Mutex::Autolock autoLock(mLock); 1618 1619 if (mSuspensionState == NULL) { 1620 
return INVALID_OPERATION; 1621 } 1622 1623 SuspensionState *state = mSuspensionState; 1624 mSuspensionState = NULL; 1625 1626 status_t err; 1627 if (state->mFileSource != NULL) { 1628 err = setDataSource_l(state->mFileSource); 1629 1630 if (err == OK) { 1631 mFileSource = state->mFileSource; 1632 } 1633 } else { 1634 err = setDataSource_l(state->mUri, &state->mUriHeaders); 1635 } 1636 1637 if (err != OK) { 1638 delete state; 1639 state = NULL; 1640 1641 return err; 1642 } 1643 1644 seekTo_l(state->mPositionUs); 1645 1646 mFlags = state->mFlags & (LOOPING | AT_EOS); 1647 1648 if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) { 1649 mVideoRenderer = 1650 new AwesomeLocalRenderer( 1651 true, // previewOnly 1652 "", 1653 (OMX_COLOR_FORMATTYPE)state->mColorFormat, 1654 mISurface, 1655 mSurface, 1656 state->mVideoWidth, 1657 state->mVideoHeight, 1658 state->mDecodedWidth, 1659 state->mDecodedHeight); 1660 1661 mVideoRendererIsPreview = true; 1662 1663 ((AwesomeLocalRenderer *)mVideoRenderer.get())->render( 1664 state->mLastVideoFrame, state->mLastVideoFrameSize); 1665 } 1666 1667 if (state->mFlags & PLAYING) { 1668 play_l(); 1669 } 1670 1671 mSuspensionState = state; 1672 state = NULL; 1673 1674 return OK; 1675} 1676 1677uint32_t AwesomePlayer::flags() const { 1678 return mExtractorFlags; 1679} 1680 1681} // namespace android 1682 1683