AwesomePlayer.cpp revision 6a63a939601645404fd98f58c19cc38ca818d99e
1/* 2 * Copyright (C) 2009 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "AwesomePlayer" 19#include <utils/Log.h> 20 21#include <dlfcn.h> 22 23#include "include/ARTSPController.h" 24#include "include/AwesomePlayer.h" 25#include "include/LiveSource.h" 26#include "include/SoftwareRenderer.h" 27#include "include/NuCachedSource2.h" 28#include "include/ThrottledSource.h" 29 30#include "ARTPSession.h" 31#include "APacketSource.h" 32#include "ASessionDescription.h" 33#include "UDPPusher.h" 34 35#include <binder/IPCThreadState.h> 36#include <media/stagefright/AudioPlayer.h> 37#include <media/stagefright/DataSource.h> 38#include <media/stagefright/FileSource.h> 39#include <media/stagefright/MediaBuffer.h> 40#include <media/stagefright/MediaDefs.h> 41#include <media/stagefright/MediaExtractor.h> 42#include <media/stagefright/MediaDebug.h> 43#include <media/stagefright/MediaSource.h> 44#include <media/stagefright/MetaData.h> 45#include <media/stagefright/OMXCodec.h> 46 47#include <surfaceflinger/ISurface.h> 48 49#include <media/stagefright/foundation/ALooper.h> 50 51namespace android { 52 53static int64_t kLowWaterMarkUs = 2000000ll; // 2secs 54static int64_t kHighWaterMarkUs = 10000000ll; // 10secs 55 56struct AwesomeEvent : public TimedEventQueue::Event { 57 AwesomeEvent( 58 AwesomePlayer *player, 59 void (AwesomePlayer::*method)()) 60 : mPlayer(player), 61 mMethod(method) { 
62 } 63 64protected: 65 virtual ~AwesomeEvent() {} 66 67 virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) { 68 (mPlayer->*mMethod)(); 69 } 70 71private: 72 AwesomePlayer *mPlayer; 73 void (AwesomePlayer::*mMethod)(); 74 75 AwesomeEvent(const AwesomeEvent &); 76 AwesomeEvent &operator=(const AwesomeEvent &); 77}; 78 79struct AwesomeRemoteRenderer : public AwesomeRenderer { 80 AwesomeRemoteRenderer(const sp<IOMXRenderer> &target) 81 : mTarget(target) { 82 } 83 84 virtual void render(MediaBuffer *buffer) { 85 void *id; 86 if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) { 87 mTarget->render((IOMX::buffer_id)id); 88 } 89 } 90 91private: 92 sp<IOMXRenderer> mTarget; 93 94 AwesomeRemoteRenderer(const AwesomeRemoteRenderer &); 95 AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &); 96}; 97 98struct AwesomeLocalRenderer : public AwesomeRenderer { 99 AwesomeLocalRenderer( 100 bool previewOnly, 101 const char *componentName, 102 OMX_COLOR_FORMATTYPE colorFormat, 103 const sp<ISurface> &surface, 104 size_t displayWidth, size_t displayHeight, 105 size_t decodedWidth, size_t decodedHeight) 106 : mTarget(NULL), 107 mLibHandle(NULL) { 108 init(previewOnly, componentName, 109 colorFormat, surface, displayWidth, 110 displayHeight, decodedWidth, decodedHeight); 111 } 112 113 virtual void render(MediaBuffer *buffer) { 114 render((const uint8_t *)buffer->data() + buffer->range_offset(), 115 buffer->range_length()); 116 } 117 118 void render(const void *data, size_t size) { 119 mTarget->render(data, size, NULL); 120 } 121 122protected: 123 virtual ~AwesomeLocalRenderer() { 124 delete mTarget; 125 mTarget = NULL; 126 127 if (mLibHandle) { 128 dlclose(mLibHandle); 129 mLibHandle = NULL; 130 } 131 } 132 133private: 134 VideoRenderer *mTarget; 135 void *mLibHandle; 136 137 void init( 138 bool previewOnly, 139 const char *componentName, 140 OMX_COLOR_FORMATTYPE colorFormat, 141 const sp<ISurface> &surface, 142 size_t displayWidth, size_t displayHeight, 143 
size_t decodedWidth, size_t decodedHeight); 144 145 AwesomeLocalRenderer(const AwesomeLocalRenderer &); 146 AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);; 147}; 148 149void AwesomeLocalRenderer::init( 150 bool previewOnly, 151 const char *componentName, 152 OMX_COLOR_FORMATTYPE colorFormat, 153 const sp<ISurface> &surface, 154 size_t displayWidth, size_t displayHeight, 155 size_t decodedWidth, size_t decodedHeight) { 156 if (!previewOnly) { 157 // We will stick to the vanilla software-color-converting renderer 158 // for "previewOnly" mode, to avoid unneccessarily switching overlays 159 // more often than necessary. 160 161 mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW); 162 163 if (mLibHandle) { 164 typedef VideoRenderer *(*CreateRendererFunc)( 165 const sp<ISurface> &surface, 166 const char *componentName, 167 OMX_COLOR_FORMATTYPE colorFormat, 168 size_t displayWidth, size_t displayHeight, 169 size_t decodedWidth, size_t decodedHeight); 170 171 CreateRendererFunc func = 172 (CreateRendererFunc)dlsym( 173 mLibHandle, 174 "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20" 175 "OMX_COLOR_FORMATTYPEjjjj"); 176 177 if (func) { 178 mTarget = 179 (*func)(surface, componentName, colorFormat, 180 displayWidth, displayHeight, 181 decodedWidth, decodedHeight); 182 } 183 } 184 } 185 186 if (mTarget == NULL) { 187 mTarget = new SoftwareRenderer( 188 colorFormat, surface, displayWidth, displayHeight, 189 decodedWidth, decodedHeight); 190 } 191} 192 193AwesomePlayer::AwesomePlayer() 194 : mQueueStarted(false), 195 mTimeSource(NULL), 196 mVideoRendererIsPreview(false), 197 mAudioPlayer(NULL), 198 mFlags(0), 199 mExtractorFlags(0), 200 mLastVideoBuffer(NULL), 201 mVideoBuffer(NULL), 202 mSuspensionState(NULL) { 203 CHECK_EQ(mClient.connect(), OK); 204 205 DataSource::RegisterDefaultSniffers(); 206 207 mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent); 208 mVideoEventPending = false; 209 mStreamDoneEvent = new AwesomeEvent(this, 
&AwesomePlayer::onStreamDone); 210 mStreamDoneEventPending = false; 211 mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate); 212 mBufferingEventPending = false; 213 214 mCheckAudioStatusEvent = new AwesomeEvent( 215 this, &AwesomePlayer::onCheckAudioStatus); 216 217 mAudioStatusEventPending = false; 218 219 reset(); 220} 221 222AwesomePlayer::~AwesomePlayer() { 223 if (mQueueStarted) { 224 mQueue.stop(); 225 } 226 227 reset(); 228 229 mClient.disconnect(); 230} 231 232void AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) { 233 mQueue.cancelEvent(mVideoEvent->eventID()); 234 mVideoEventPending = false; 235 mQueue.cancelEvent(mStreamDoneEvent->eventID()); 236 mStreamDoneEventPending = false; 237 mQueue.cancelEvent(mCheckAudioStatusEvent->eventID()); 238 mAudioStatusEventPending = false; 239 240 if (!keepBufferingGoing) { 241 mQueue.cancelEvent(mBufferingEvent->eventID()); 242 mBufferingEventPending = false; 243 } 244} 245 246void AwesomePlayer::setListener(const wp<MediaPlayerBase> &listener) { 247 Mutex::Autolock autoLock(mLock); 248 mListener = listener; 249} 250 251status_t AwesomePlayer::setDataSource( 252 const char *uri, const KeyedVector<String8, String8> *headers) { 253 Mutex::Autolock autoLock(mLock); 254 return setDataSource_l(uri, headers); 255} 256 257status_t AwesomePlayer::setDataSource_l( 258 const char *uri, const KeyedVector<String8, String8> *headers) { 259 reset_l(); 260 261 mUri = uri; 262 263 if (headers) { 264 mUriHeaders = *headers; 265 } 266 267 // The actual work will be done during preparation in the call to 268 // ::finishSetDataSource_l to avoid blocking the calling thread in 269 // setDataSource for any significant time. 
270 271 return OK; 272} 273 274status_t AwesomePlayer::setDataSource( 275 int fd, int64_t offset, int64_t length) { 276#if 0 277 // return setDataSource("httplive://ipad.akamai.com/Video_Content/usat/tt_062209_iphone/440k/prog_index.m3u8"); 278 // return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/sl.m3u8"); 279 return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/0440.m3u8"); 280 // return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/0640.m3u8"); 281 // return setDataSource("httplive://qthttp.apple.com.edgesuite.net/1009qpeijrfn/1240_vod.m3u8"); 282 // return setDataSource("httplive://iphoned5.akamai.com.edgesuite.net/mhbarron/nasatv/nasatv_96.m3u8"); 283 // return setDataSource("httplive://iphoned5.akamai.com.edgesuite.net/mhbarron/nasatv/nasatv_1500.m3u8"); 284 // return setDataSource("httplive://iphone.video.hsn.com/iPhone_high.m3u8"); 285 // return setDataSource("httplive://iphoned5.akamai.com.edgesuite.net/mhbarron/iphonewebcast/webcast090209_all/webcast090209_all.m3u8"); 286 // return setDataSource("httplive://qthttp.akamai.com.edgesuite.net/iphone_demo/Video_Content/usat/tt_062209_iphone/hi/prog_index.m3u8"); 287 // return setDataSource("httplive://qthttp.akamai.com.edgesuite.net/iphone_demo/Video_Content/usat/tt_googmaps/hi/prog_index.m3u8"); 288 // return setDataSource("httplive://qthttp.akamai.com.edgesuite.net/iphone_demo/Video_Content/mtv/ni_spo_25a_rt74137_clip_syn/hi/prog_index.m3u8"); 289#endif 290 291 Mutex::Autolock autoLock(mLock); 292 293 reset_l(); 294 295 sp<DataSource> dataSource = new FileSource(fd, offset, length); 296 297 status_t err = dataSource->initCheck(); 298 299 if (err != OK) { 300 return err; 301 } 302 303 mFileSource = dataSource; 304 305 return setDataSource_l(dataSource); 306} 307 308status_t AwesomePlayer::setDataSource_l( 309 const sp<DataSource> &dataSource) { 310 sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource); 311 312 if (extractor 
== NULL) { 313 return UNKNOWN_ERROR; 314 } 315 316 return setDataSource_l(extractor); 317} 318 319status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) { 320 bool haveAudio = false; 321 bool haveVideo = false; 322 for (size_t i = 0; i < extractor->countTracks(); ++i) { 323 sp<MetaData> meta = extractor->getTrackMetaData(i); 324 325 const char *mime; 326 CHECK(meta->findCString(kKeyMIMEType, &mime)); 327 328 if (!haveVideo && !strncasecmp(mime, "video/", 6)) { 329 setVideoSource(extractor->getTrack(i)); 330 haveVideo = true; 331 } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) { 332 setAudioSource(extractor->getTrack(i)); 333 haveAudio = true; 334 335 sp<MetaData> fileMeta = extractor->getMetaData(); 336 int32_t loop; 337 if (fileMeta != NULL 338 && fileMeta->findInt32(kKeyAutoLoop, &loop) && loop != 0) { 339 mFlags |= AUTO_LOOPING; 340 } 341 } 342 343 if (haveAudio && haveVideo) { 344 break; 345 } 346 } 347 348 if (!haveAudio && !haveVideo) { 349 return UNKNOWN_ERROR; 350 } 351 352 mExtractorFlags = extractor->flags(); 353 354 return OK; 355} 356 357void AwesomePlayer::reset() { 358 Mutex::Autolock autoLock(mLock); 359 reset_l(); 360} 361 362void AwesomePlayer::reset_l() { 363 if (mFlags & PREPARING) { 364 mFlags |= PREPARE_CANCELLED; 365 if (mConnectingDataSource != NULL) { 366 LOGI("interrupting the connection process"); 367 mConnectingDataSource->disconnect(); 368 } 369 } 370 371 while (mFlags & PREPARING) { 372 mPreparedCondition.wait(mLock); 373 } 374 375 cancelPlayerEvents(); 376 377 mCachedSource.clear(); 378 mAudioTrack.clear(); 379 mVideoTrack.clear(); 380 381 // Shutdown audio first, so that the respone to the reset request 382 // appears to happen instantaneously as far as the user is concerned 383 // If we did this later, audio would continue playing while we 384 // shutdown the video-related resources and the player appear to 385 // not be as responsive to a reset request. 
386 if (mAudioPlayer == NULL && mAudioSource != NULL) { 387 // If we had an audio player, it would have effectively 388 // taken possession of the audio source and stopped it when 389 // _it_ is stopped. Otherwise this is still our responsibility. 390 mAudioSource->stop(); 391 } 392 mAudioSource.clear(); 393 394 mTimeSource = NULL; 395 396 delete mAudioPlayer; 397 mAudioPlayer = NULL; 398 399 mVideoRenderer.clear(); 400 401 if (mLastVideoBuffer) { 402 mLastVideoBuffer->release(); 403 mLastVideoBuffer = NULL; 404 } 405 406 if (mVideoBuffer) { 407 mVideoBuffer->release(); 408 mVideoBuffer = NULL; 409 } 410 411 if (mRTSPController != NULL) { 412 mRTSPController->disconnect(); 413 mRTSPController.clear(); 414 } 415 416 mRTPPusher.clear(); 417 mRTCPPusher.clear(); 418 mRTPSession.clear(); 419 420 if (mVideoSource != NULL) { 421 mVideoSource->stop(); 422 423 // The following hack is necessary to ensure that the OMX 424 // component is completely released by the time we may try 425 // to instantiate it again. 426 wp<MediaSource> tmp = mVideoSource; 427 mVideoSource.clear(); 428 while (tmp.promote() != NULL) { 429 usleep(1000); 430 } 431 IPCThreadState::self()->flushCommands(); 432 } 433 434 mDurationUs = -1; 435 mFlags = 0; 436 mExtractorFlags = 0; 437 mVideoWidth = mVideoHeight = -1; 438 mTimeSourceDeltaUs = 0; 439 mVideoTimeUs = 0; 440 441 mSeeking = false; 442 mSeekNotificationSent = false; 443 mSeekTimeUs = 0; 444 445 mUri.setTo(""); 446 mUriHeaders.clear(); 447 448 mFileSource.clear(); 449 450 delete mSuspensionState; 451 mSuspensionState = NULL; 452} 453 454void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) { 455 if (mListener != NULL) { 456 sp<MediaPlayerBase> listener = mListener.promote(); 457 458 if (listener != NULL) { 459 listener->sendEvent(msg, ext1, ext2); 460 } 461 } 462} 463 464// Returns true iff cached duration is available/applicable. 
// Computes how much buffered playback time remains, either from the RTSP
// controller's queue or by estimating the stream bitrate from the file
// size and overall duration of a cached HTTP source.
bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
    off_t totalSize;

    if (mRTSPController != NULL) {
        *durationUs = mRTSPController->getQueueDurationUs(eos);
        return true;
    } else if (mCachedSource != NULL && mDurationUs >= 0
            && mCachedSource->getSize(&totalSize) == OK) {
        int64_t bitrate = totalSize * 8000000ll / mDurationUs;  // in bits/sec

        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(eos);
        *durationUs = cachedDataRemaining * 8000000ll / bitrate;
        return true;
    }

    return false;
}

// Periodic (1s) buffering event: reports buffering percentage to the
// listener and implements the low/high watermark pause/resume policy,
// by bytes when the bitrate is unknown and by cached duration otherwise.
void AwesomePlayer::onBufferingUpdate() {
    Mutex::Autolock autoLock(mLock);
    if (!mBufferingEventPending) {
        // Cancelled after being scheduled; ignore.
        return;
    }
    mBufferingEventPending = false;

    if (mCachedSource != NULL) {
        bool eos;
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&eos);

        if (eos) {
            notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
        } else {
            off_t size;
            if (mDurationUs >= 0 && mCachedSource->getSize(&size) == OK) {
                int64_t bitrate = size * 8000000ll / mDurationUs;  // in bits/sec

                size_t cachedSize = mCachedSource->cachedSize();
                int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;

                int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
                if (percentage > 100) {
                    percentage = 100;
                }

                notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
            } else {
                // We don't know the bitrate of the stream, use absolute size
                // limits to maintain the cache.

                const size_t kLowWaterMarkBytes = 400000;
                const size_t kHighWaterMarkBytes = 1000000;

                if ((mFlags & PLAYING) && !eos
                        && (cachedDataRemaining < kLowWaterMarkBytes)) {
                    LOGI("cache is running low (< %d) , pausing.",
                         kLowWaterMarkBytes);
                    mFlags |= CACHE_UNDERRUN;
                    pause_l();
                    notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
                } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
                    if (mFlags & CACHE_UNDERRUN) {
                        LOGI("cache has filled up (> %d), resuming.",
                             kHighWaterMarkBytes);
                        mFlags &= ~CACHE_UNDERRUN;
                        play_l();
                        notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
                    } else if (mFlags & PREPARING) {
                        LOGV("cache has filled up (> %d), prepare is done",
                             kHighWaterMarkBytes);
                        finishAsyncPrepare_l();
                    }
                }
            }
        }
    }

    // Duration-based watermark policy (applies to RTSP and known-bitrate
    // cached sources).
    int64_t cachedDurationUs;
    bool eos;
    if (getCachedDuration_l(&cachedDurationUs, &eos)) {
        if ((mFlags & PLAYING) && !eos
                && (cachedDurationUs < kLowWaterMarkUs)) {
            LOGI("cache is running low (%.2f secs) , pausing.",
                 cachedDurationUs / 1E6);
            mFlags |= CACHE_UNDERRUN;
            pause_l();
            notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
            if (mFlags & CACHE_UNDERRUN) {
                LOGI("cache has filled up (%.2f secs), resuming.",
                     cachedDurationUs / 1E6);
                mFlags &= ~CACHE_UNDERRUN;
                play_l();
                notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
            } else if (mFlags & PREPARING) {
                LOGV("cache has filled up (%.2f secs), prepare is done",
                     cachedDurationUs / 1E6);
                finishAsyncPrepare_l();
            }
        }
    }

    // Re-arm for the next periodic check.
    postBufferingEvent_l();
}

// Posted whenever any stream finishes playing: reports errors, and once
// both streams are done either loops back to 0 or signals completion.
void AwesomePlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.

    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l();

        mFlags |= AT_EOS;
        return;
    }

    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & (LOOPING | AUTO_LOOPING)) {
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        pause_l();

        mFlags |= AT_EOS;
    }
}

status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);

    // An explicit play() overrides a buffering-induced pause.
    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

// Starts (or resumes) playback: prepares if needed, spins up the
// AudioPlayer (which then serves as the time source) and kicks off the
// video event loop.
status_t AwesomePlayer::play_l() {
    if (mFlags & PLAYING) {
        return OK;
    }

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {
                mAudioPlayer = new AudioPlayer(mAudioSink, this);
                mAudioPlayer->setSource(mAudioSource);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);

                    return err;
                }

                mTimeSource = mAudioPlayer;

                deferredAudioSeek = true;

                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mAudioPlayer->resume();
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        // No audio clock available; fall back to the system clock.
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}

// (Re)creates the video renderer matching the current decoder's output
// format: a remote IOMXRenderer for OMX.* components, a local renderer
// otherwise.
void AwesomePlayer::initRenderer_l() {
    if (mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        mVideoRenderer.clear();

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        if (!strncmp("OMX.", component, 4)) {
            // Our OMX codecs allocate buffers on the media_server side
            // therefore they require a remote IOMXRenderer that knows how
            // to display them.
            mVideoRenderer = new AwesomeRemoteRenderer(
                mClient.interface()->createRenderer(
                        mISurface, component,
                        (OMX_COLOR_FORMATTYPE)format,
                        decodedWidth, decodedHeight,
                        mVideoWidth, mVideoHeight));
        } else {
            // Other decoders are instantiated locally and as a consequence
            // allocate their buffers in local address space.
            mVideoRenderer = new AwesomeLocalRenderer(
                false,  // previewOnly
                component,
                (OMX_COLOR_FORMATTYPE)format,
                mISurface,
                mVideoWidth, mVideoHeight,
                decodedWidth, decodedHeight);
        }
    }
}

status_t AwesomePlayer::pause() {
    Mutex::Autolock autoLock(mLock);

    // A user-initiated pause clears any buffering-induced one.
    mFlags &= ~CACHE_UNDERRUN;

    return pause_l();
}

// Pauses playback but keeps the buffering event alive so the cache keeps
// filling while paused.
status_t AwesomePlayer::pause_l() {
    if (!(mFlags & PLAYING)) {
        return OK;
    }

    cancelPlayerEvents(true /* keepBufferingGoing */);

    if (mAudioPlayer != NULL) {
        mAudioPlayer->pause();
    }

    mFlags &= ~PLAYING;

    return OK;
}

// A player paused only for rebuffering still reports itself as playing.
bool AwesomePlayer::isPlaying() const {
    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}

void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);

    mISurface = isurface;
}

void AwesomePlayer::setAudioSink(
        const sp<MediaPlayerBase::AudioSink> &audioSink) {
    Mutex::Autolock autoLock(mLock);

    mAudioSink = audioSink;
}

status_t AwesomePlayer::setLooping(bool shouldLoop) {
    Mutex::Autolock autoLock(mLock);

    mFlags = mFlags & ~LOOPING;

    if (shouldLoop) {
        mFlags |= LOOPING;
    }

    return OK;
}

status_t AwesomePlayer::getDuration(int64_t *durationUs) {
    // mMiscStateLock guards mDurationUs, which is updated from the decoder
    // init paths.
    Mutex::Autolock autoLock(mMiscStateLock);

    if (mDurationUs < 0) {
        return UNKNOWN_ERROR;
    }

    *durationUs = mDurationUs;

    return OK;
}

// Reports the current position, preferring (in order): RTSP normal play
// time, a pending seek target, the last rendered video timestamp, the
// audio clock.
status_t AwesomePlayer::getPosition(int64_t *positionUs) {
    if (mRTSPController != NULL) {
        *positionUs = mRTSPController->getNormalPlayTimeUs();
    }
    else if (mSeeking) {
        *positionUs = mSeekTimeUs;
    } else if (mVideoSource != NULL) {
        Mutex::Autolock autoLock(mMiscStateLock);
        *positionUs = mVideoTimeUs;
    } else if (mAudioPlayer != NULL) {
        *positionUs = mAudioPlayer->getMediaTimeUs();
    } else {
        *positionUs = 0;
    }

    return OK;
}

status_t AwesomePlayer::seekTo(int64_t timeUs) {
    // Silently ignore seeks on non-seekable sources.
    if (mExtractorFlags
            & (MediaExtractor::CAN_SEEK_FORWARD
                | MediaExtractor::CAN_SEEK_BACKWARD)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}

// Records the seek target; the actual seek is carried out by the video
// event loop (or immediately by the audio player for audio-only content).
status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
    if (mRTSPController != NULL) {
        // RTSP seeks are handled entirely by the controller.
        mRTSPController->seek(timeUs);

        notifyListener_l(MEDIA_SEEK_COMPLETE);
        mSeekNotificationSent = true;
        return OK;
    }

    if (mFlags & CACHE_UNDERRUN) {
        mFlags &= ~CACHE_UNDERRUN;
        play_l();
    }

    mSeeking = true;
    mSeekNotificationSent = false;
    mSeekTimeUs = timeUs;
    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);

    seekAudioIfNecessary_l();

    if (!(mFlags & PLAYING)) {
        LOGV("seeking while paused, sending SEEK_COMPLETE notification"
             " immediately.");

        notifyListener_l(MEDIA_SEEK_COMPLETE);
        mSeekNotificationSent = true;
    }

    return OK;
}

// Audio-only content seeks the audio player directly; with video present
// the seek is driven from onVideoEvent instead.
void AwesomePlayer::seekAudioIfNecessary_l() {
    if (mSeeking && mVideoSource == NULL && mAudioPlayer != NULL) {
        mAudioPlayer->seekTo(mSeekTimeUs);

        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
        mSeekNotificationSent = false;
    }
}

status_t AwesomePlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}

void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mAudioTrack = source;
}

// Instantiates the audio decoder (or passes raw PCM through), starts it
// and picks up the track duration.
status_t AwesomePlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        // Raw PCM needs no decoder.
        mAudioSource = mAudioTrack;
    } else {
        mAudioSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}

void AwesomePlayer::setVideoSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mVideoTrack = source;
}

// Instantiates and starts the video decoder and picks up duration and
// display dimensions from the track format.
status_t AwesomePlayer::initVideoDecoder() {
    uint32_t flags = 0;
    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false, // createEncoder
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}

// Heart of video playback: decodes the next frame, performs pending
// seeks, computes lateness against the time source, drops late frames,
// renders on-time ones and reposts itself.
void AwesomePlayer::onVideoEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mSeeking) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }

        if (mVideoBuffer) {
            mVideoBuffer->release();
            mVideoBuffer = NULL;
        }

        if (mCachedSource != NULL && mAudioSource != NULL) {
            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            // Seek only applies to the first read after a seek request.
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("VideoSource signalled format change.");

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }

                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    if (mSeeking) {
        if (mAudioPlayer != NULL) {
            LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);

            // Align audio to the actual video frame we landed on.
            mAudioPlayer->seekTo(timeUs);
            mAudioPlayer->resume();
            mWatchForAudioSeekComplete = true;
            mWatchForAudioEOS = true;
        } else if (!mSeekNotificationSent) {
            // If we're playing video only, report seek complete now,
            // otherwise audio player will notify us later.
            notifyListener_l(MEDIA_SEEK_COMPLETE);
        }

        mFlags |= FIRST_FRAME;
        mSeeking = false;
        mSeekNotificationSent = false;
    }

    // Once audio hits EOS its clock stops; switch to the system clock.
    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if (mFlags & FIRST_FRAME) {
        mFlags &= ~FIRST_FRAME;

        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
    }

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

    int64_t latenessUs = nowUs - timeUs;

    if (mRTPSession != NULL) {
        // We'll completely ignore timestamps for gtalk videochat
        // and we'll play incoming video as fast as we get it.
        latenessUs = 0;
    }

    if (latenessUs > 40000) {
        // We're more than 40ms late.
        LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6);

        // Drop this frame and try the next one immediately.
        mVideoBuffer->release();
        mVideoBuffer = NULL;

        postVideoEvent_l();
        return;
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.

        postVideoEvent_l(10000);
        return;
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        initRenderer_l();
    }

    if (mVideoRenderer != NULL) {
        mVideoRenderer->render(mVideoBuffer);
    }

    // Keep the last frame alive so it can be redisplayed (e.g. after a
    // surface change) until the next one arrives.
    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }
    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    postVideoEvent_l();
}

// Schedules the next video event; a negative delay selects the default
// 10ms poll interval.
void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (mVideoEventPending) {
        return;
    }

    mVideoEventPending = true;
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
}

void AwesomePlayer::postStreamDoneEvent_l(status_t status) {
    if (mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = true;

    mStreamDoneStatus = status;
    mQueue.postEvent(mStreamDoneEvent);
}

void AwesomePlayer::postBufferingEvent_l() {
    if (mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = true;
    mQueue.postEventWithDelay(mBufferingEvent, 1000000ll);
}

void AwesomePlayer::postCheckAudioStatusEvent_l() {
    if (mAudioStatusEventPending) {
        return;
    }
    mAudioStatusEventPending = true;
    mQueue.postEvent(mCheckAudioStatusEvent);
}

// Polled by the AudioPlayer (via postCheckAudioStatusEvent_l): notices
// completed audio seeks and audio EOS.
void AwesomePlayer::onCheckAudioStatus() {
    Mutex::Autolock autoLock(mLock);
    if (!mAudioStatusEventPending) {
        // Event was dispatched and while we were blocking on the mutex,
        // has already been cancelled.
        return;
    }

    mAudioStatusEventPending = false;

    if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
        mWatchForAudioSeekComplete = false;

        if (!mSeekNotificationSent) {
            notifyListener_l(MEDIA_SEEK_COMPLETE);
            mSeekNotificationSent = true;
        }

        mSeeking = false;
    }

    status_t finalStatus;
    if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
        mWatchForAudioEOS = false;
        mFlags |= AUDIO_AT_EOS;
        mFlags |= FIRST_FRAME;
        postStreamDoneEvent_l(finalStatus);
    }
}

status_t AwesomePlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

// Synchronous prepare: kicks off the async path and waits on the
// condition variable until it finishes.
status_t AwesomePlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
// (continuation of AwesomePlayer::prepare_l() begun above)
    }

    return mPrepareResult;
}

// Asynchronous prepare: public entry point.
status_t AwesomePlayer::prepareAsync() {
    Mutex::Autolock autoLock(mLock);

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    mIsAsyncPrepare = true;
    return prepareAsync_l();
}

// Shared async-prepare core (mLock held): starts the event queue thread on
// first use, marks PREPARING and posts the prepare event.
status_t AwesomePlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    // Lazily start the timed event queue the first time we prepare.
    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new AwesomeEvent(
            this, &AwesomePlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

// Resolves mUri into a DataSource/MediaExtractor. Dispatches on scheme:
// http (cached HTTP), httplive (HTTP live TS), rtsp://gtalk/ (videochat
// RTP), rtsp (full RTSP), else generic URI. Caller holds mLock.
status_t AwesomePlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;

    if (!strncasecmp("http://", mUri.string(), 7)) {
        mConnectingDataSource = new NuHTTPDataSource;

        // Drop the lock across the (potentially slow, blocking) connect so
        // the player can still be interrupted/reset meanwhile.
        mLock.unlock();
        status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
        mLock.lock();

        if (err != OK) {
            mConnectingDataSource.clear();

            LOGI("mConnectingDataSource->connect() returned %d", err);
            return err;
        }

#if 0
        mCachedSource = new NuCachedSource2(
                new ThrottledSource(
                    mConnectingDataSource, 50 * 1024 /* bytes/sec */));
#else
        mCachedSource = new NuCachedSource2(mConnectingDataSource);
#endif
        mConnectingDataSource.clear();

        dataSource = mCachedSource;
    } else if (!strncasecmp(mUri.string(), "httplive://", 11)) {
        // Rewrite httplive:// back into a plain http:// URL for LiveSource.
        String8 uri("http://");
        uri.append(mUri.string() + 11);

        dataSource = new LiveSource(uri.string());

        mCachedSource = new NuCachedSource2(dataSource);
        dataSource = mCachedSource;

        // Live streams are always MPEG2-TS; bypass sniffing.
        sp<MediaExtractor> extractor =
            MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

        return setDataSource_l(extractor);
    } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) {
        if (mLooper == NULL) {
            mLooper = new ALooper;
            mLooper->setName("gtalk rtp");
            mLooper->start(
                    false /* runOnCallingThread */,
                    false /* canCallJava */,
                    PRIORITY_HIGHEST);
        }

        // URI layout: rtsp://gtalk/<codec>/<width>/<height>
        const char *startOfCodecString = &mUri.string()[13];
        const char *startOfSlash1 = strchr(startOfCodecString, '/');
        if (startOfSlash1 == NULL) {
            return BAD_VALUE;
        }
        const char *startOfWidthString = &startOfSlash1[1];
        const char *startOfSlash2 = strchr(startOfWidthString, '/');
        if (startOfSlash2 == NULL) {
            return BAD_VALUE;
        }
        const char *startOfHeightString = &startOfSlash2[1];

        String8 codecString(startOfCodecString, startOfSlash1 - startOfCodecString);
        String8 widthString(startOfWidthString, startOfSlash2 - startOfWidthString);
        String8 heightString(startOfHeightString);

#if 0
        mRTPPusher = new UDPPusher("/data/misc/rtpout.bin", 5434);
        mLooper->registerHandler(mRTPPusher);

        mRTCPPusher = new UDPPusher("/data/misc/rtcpout.bin", 5435);
        mLooper->registerHandler(mRTCPPusher);
#endif

        mRTPSession = new ARTPSession;
        mLooper->registerHandler(mRTPSession);

#if 0
        // My AMR SDP
        static const char *raw =
            "v=0\r\n"
            "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
            "s=QuickTime\r\n"
            "t=0 0\r\n"
            "a=range:npt=0-315\r\n"
            "a=isma-compliance:2,2.0,2\r\n"
            "m=audio 5434 RTP/AVP 97\r\n"
            "c=IN IP4 127.0.0.1\r\n"
            "b=AS:30\r\n"
            "a=rtpmap:97 AMR/8000/1\r\n"
            "a=fmtp:97 octet-align\r\n";
#elif 1
        // Synthesize an SDP describing the incoming video stream from the
        // codec/width/height parsed out of the URI above.
        String8 sdp;
        sdp.appendFormat(
            "v=0\r\n"
            "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
            "s=QuickTime\r\n"
            "t=0 0\r\n"
            "a=range:npt=0-315\r\n"
            "a=isma-compliance:2,2.0,2\r\n"
            "m=video 5434 RTP/AVP 97\r\n"
            "c=IN IP4 127.0.0.1\r\n"
            "b=AS:30\r\n"
            "a=rtpmap:97 %s/90000\r\n"
            "a=cliprect:0,0,%s,%s\r\n"
            "a=framesize:97 %s-%s\r\n",

            codecString.string(),
            heightString.string(), widthString.string(),
            widthString.string(), heightString.string()
            );
        const char *raw = sdp.string();

#endif

        sp<ASessionDescription> desc = new ASessionDescription;
        CHECK(desc->setTo(raw, strlen(raw)));

        CHECK_EQ(mRTPSession->setup(desc), (status_t)OK);

        if (mRTPPusher != NULL) {
            mRTPPusher->start();
        }

        if (mRTCPPusher != NULL) {
            mRTCPPusher->start();
        }

        // The gtalk session carries exactly one track (audio OR video).
        CHECK_EQ(mRTPSession->countTracks(), 1u);
        sp<MediaSource> source = mRTPSession->trackAt(0);

#if 0
        bool eos;
        while (((APacketSource *)source.get())
                ->getQueuedDuration(&eos) < 5000000ll && !eos) {
            usleep(100000ll);
        }
#endif

        const char *mime;
        CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("video/", mime, 6)) {
            setVideoSource(source);
        } else {
            CHECK(!strncasecmp("audio/", mime, 6));
            setAudioSource(source);
        }

        mExtractorFlags = MediaExtractor::CAN_PAUSE;

        return OK;
    } else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
        if (mLooper == NULL) {
            mLooper = new ALooper;
            mLooper->setName("rtsp");
            mLooper->start();
        }
        mRTSPController = new ARTSPController(mLooper);
        status_t err = mRTSPController->connect(mUri.string());

        LOGI("ARTSPController::connect returned %d", err);

        if (err != OK) {
            mRTSPController.clear();
            return err;
        }

        // The RTSP controller doubles as the extractor.
        sp<MediaExtractor> extractor = mRTSPController.get();
        return setDataSource_l(extractor);
    } else {
        dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
    }

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);

    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    return setDataSource_l(extractor);
}

// Records a failed prepare: notifies the listener (async case only),
// publishes the error and wakes any thread blocked in prepare_l().
void AwesomePlayer::abortPrepare(status_t err) {
    CHECK(err != OK);

    if (mIsAsyncPrepare) {
        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
    }

    mPrepareResult = err;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

// static
// Callback handed to long-running prepare steps; returning false tells them
// to bail out because prepare was cancelled.
bool AwesomePlayer::ContinuePreparation(void *cookie) {
    AwesomePlayer *me = static_cast<AwesomePlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

// The async prepare event body: resolve the data source, instantiate the
// decoders, then either enter the buffering loop (network sources) or
// finish immediately.
void AwesomePlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);

    if (mFlags & PREPARE_CANCELLED) {
        LOGI("prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mCachedSource != NULL || mRTSPController != NULL) {
        // Network source: defer completion to the buffering loop, which
        // calls finishAsyncPrepare_l() once enough data is cached.
        postBufferingEvent_l();
    } else {
        finishAsyncPrepare_l();
    }
}

// Marks prepare as successfully completed: notifies listeners (async case),
// publishes PREPARED and wakes any thread blocked in prepare_l().
void AwesomePlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        if (mVideoWidth < 0 || mVideoHeight < 0) {
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
        } else {
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
        }

        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

1530status_t AwesomePlayer::suspend() { 1531 LOGV("suspend"); 1532 Mutex::Autolock autoLock(mLock); 1533 1534 if (mSuspensionState != NULL) { 1535 if (mLastVideoBuffer == NULL) { 1536 //go into here if video is suspended again 1537 //after resuming without being played between 1538 //them 1539 SuspensionState *state = mSuspensionState; 1540 mSuspensionState = NULL; 1541 reset_l(); 1542 mSuspensionState = state; 1543 return OK; 1544 } 1545 1546 delete mSuspensionState; 1547 mSuspensionState = NULL; 1548 } 1549 1550 if (mFlags & PREPARING) { 1551 mFlags |= PREPARE_CANCELLED; 1552 if (mConnectingDataSource != NULL) { 1553 LOGI("interrupting the connection process"); 1554 mConnectingDataSource->disconnect(); 1555 } 1556 } 1557 1558 while (mFlags & PREPARING) { 1559 mPreparedCondition.wait(mLock); 1560 } 1561 1562 SuspensionState *state = new SuspensionState; 1563 state->mUri = mUri; 1564 state->mUriHeaders = mUriHeaders; 1565 state->mFileSource = mFileSource; 1566 1567 state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS); 1568 getPosition(&state->mPositionUs); 1569 1570 if (mLastVideoBuffer) { 1571 size_t size = mLastVideoBuffer->range_length(); 1572 if (size) { 1573 state->mLastVideoFrameSize = size; 1574 state->mLastVideoFrame = malloc(size); 1575 memcpy(state->mLastVideoFrame, 1576 (const uint8_t *)mLastVideoBuffer->data() 1577 + mLastVideoBuffer->range_offset(), 1578 size); 1579 1580 state->mVideoWidth = mVideoWidth; 1581 state->mVideoHeight = mVideoHeight; 1582 1583 sp<MetaData> meta = mVideoSource->getFormat(); 1584 CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat)); 1585 CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth)); 1586 CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight)); 1587 } 1588 } 1589 1590 reset_l(); 1591 1592 mSuspensionState = state; 1593 1594 return OK; 1595} 1596 1597status_t AwesomePlayer::resume() { 1598 LOGV("resume"); 1599 Mutex::Autolock autoLock(mLock); 1600 1601 if (mSuspensionState == NULL) { 1602 
return INVALID_OPERATION; 1603 } 1604 1605 SuspensionState *state = mSuspensionState; 1606 mSuspensionState = NULL; 1607 1608 status_t err; 1609 if (state->mFileSource != NULL) { 1610 err = setDataSource_l(state->mFileSource); 1611 1612 if (err == OK) { 1613 mFileSource = state->mFileSource; 1614 } 1615 } else { 1616 err = setDataSource_l(state->mUri, &state->mUriHeaders); 1617 } 1618 1619 if (err != OK) { 1620 delete state; 1621 state = NULL; 1622 1623 return err; 1624 } 1625 1626 seekTo_l(state->mPositionUs); 1627 1628 mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS); 1629 1630 if (state->mLastVideoFrame && mISurface != NULL) { 1631 mVideoRenderer = 1632 new AwesomeLocalRenderer( 1633 true, // previewOnly 1634 "", 1635 (OMX_COLOR_FORMATTYPE)state->mColorFormat, 1636 mISurface, 1637 state->mVideoWidth, 1638 state->mVideoHeight, 1639 state->mDecodedWidth, 1640 state->mDecodedHeight); 1641 1642 mVideoRendererIsPreview = true; 1643 1644 ((AwesomeLocalRenderer *)mVideoRenderer.get())->render( 1645 state->mLastVideoFrame, state->mLastVideoFrameSize); 1646 } 1647 1648 if (state->mFlags & PLAYING) { 1649 play_l(); 1650 } 1651 1652 mSuspensionState = state; 1653 state = NULL; 1654 1655 return OK; 1656} 1657 1658uint32_t AwesomePlayer::flags() const { 1659 return mExtractorFlags; 1660} 1661 1662void AwesomePlayer::postAudioEOS() { 1663 postCheckAudioStatusEvent_l(); 1664} 1665 1666void AwesomePlayer::postAudioSeekComplete() { 1667 postCheckAudioStatusEvent_l(); 1668} 1669 1670} // namespace android 1671 1672