PreviewPlayer.cpp revision a5872f770c50da1db555d53e062a9896ea4321d5
/*
 * Copyright (C) 2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// PreviewPlayer: video-editor preview playback engine. Subclasses the
// stagefright AwesomePlayer (base-class members such as mQueue, mLock,
// mFlags, mVideoSource, mAudioPlayer etc. are declared in the out-of-view
// header) and adds storyboard clip cut times, per-frame video effects,
// overlay (framing) notifications and dummy audio/video sources for
// image-only clips.

#define LOG_NDEBUG 1
#define LOG_TAG "PreviewPlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "PreviewPlayer.h"
#include "DummyAudioSource.h"
#include "DummyVideoSource.h"
#include "VideoEditorSRC.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"


#include "PreviewRenderer.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>

namespace android {


// TimedEventQueue event that, when fired, invokes the given PreviewPlayer
// member function. Non-copyable; lifetime managed by the event queue (sp<>).
struct PreviewPlayerEvent : public TimedEventQueue::Event {
    PreviewPlayerEvent(
            PreviewPlayer *player,
            void (PreviewPlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~PreviewPlayerEvent() {}

    // Dispatch to the registered member function; the queue/time args
    // are not needed by any of the handlers.
    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    PreviewPlayer *mPlayer;
    void (PreviewPlayer::*mMethod)();

    PreviewPlayerEvent(const PreviewPlayerEvent &);
    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};


// In-process renderer wrapper around PreviewRenderer. Always used here
// (instead of a remote/OMX renderer) because decoded buffers are modified
// by the post-processing module before display.
struct PreviewLocalRenderer : public PreviewPlayerRenderer {

    // Factory: constructs and initializes a renderer; returns NULL on
    // init failure (two-phase init because the ctor cannot fail).
    static PreviewLocalRenderer* initPreviewLocalRenderer (
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
    {
        PreviewLocalRenderer* mLocalRenderer = new
            PreviewLocalRenderer(
                previewOnly,
                colorFormat,
                surface,
                displayWidth, displayHeight,
                decodedWidth, decodedHeight,
                rotationDegrees);

        if ( mLocalRenderer->init(previewOnly,
                 colorFormat, surface,
                 displayWidth, displayHeight,
                 decodedWidth, decodedHeight,
                 rotationDegrees) != OK )
        {
            delete mLocalRenderer;
            return NULL;
        }
        return mLocalRenderer;
    }

    // Render the payload region of a MediaBuffer.
    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }
    // Render whatever was last written into the YV12 buffer obtained
    // via getBuffer().
    void render() {
        mTarget->renderYV12();
    }
    void getBuffer(uint8_t **data, size_t *stride) {
        mTarget->getBufferYV12(data, stride);
    }

protected:
    virtual ~PreviewLocalRenderer() {
        delete mTarget;
        mTarget = NULL;
    }

private:
    PreviewRenderer *mTarget;

    // NOTE(review): all ctor parameters are ignored; real setup happens
    // in init(). Consider removing them from the private ctor.
    PreviewLocalRenderer(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
        : mTarget(NULL) {
    }


    int init(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0);

    PreviewLocalRenderer(const PreviewLocalRenderer &);
    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);; // NOTE(review): stray extra ';'
};

// Creates the underlying PreviewRenderer; returns UNKNOWN_ERROR if the
// renderer could not be created (e.g. unsupported color format).
// previewOnly is unused here — it only selects behavior in the factory's
// caller. TODO(review): confirm and drop if truly dead.
int PreviewLocalRenderer::init(
        bool previewOnly,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {

    mTarget = PreviewRenderer::CreatePreviewRenderer (
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);
    if (mTarget == M4OSA_NULL) {
        return UNKNOWN_ERROR;
    }
    return OK;
}

// Initializes all preview-specific state and allocates the event objects
// used with the base class's TimedEventQueue, then reset()s to a clean
// stopped state.
PreviewPlayer::PreviewPlayer()
    : AwesomePlayer(),
      mFrameRGBBuffer(NULL),
      mFrameYUVBuffer(NULL),
      mReportedWidth(0),
      mReportedHeight(0),
      mCurrFramingEffectIndex(0) {

    mVideoRenderer = NULL;
    mLastVideoBuffer = NULL;
    mSuspensionState = NULL;
    mEffectsSettings = NULL;
    mVeAudioPlayer = NULL;
    mAudioMixStoryBoardTS = 0;
    mCurrentMediaBeginCutTime = 0;
    mCurrentMediaVolumeValue = 0;
    mNumberEffects = 0;
    mDecodedVideoTs = 0;
    mDecVideoTsStoryBoard = 0;
    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mProgressCbInterval = 0;
    mNumberDecVideoFrames = 0;
    mOverlayUpdateEventPosted = false;
    mIsChangeSourceRequired = true;

    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new PreviewPlayerEvent(this,
         &PreviewPlayer::onStreamDone);

    mStreamDoneEventPending = false;

    // Audio status handling is inherited unchanged from AwesomePlayer.
    mCheckAudioStatusEvent = new PreviewPlayerEvent(
        this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    mProgressCbEvent = new PreviewPlayerEvent(this,
         &PreviewPlayer::onProgressCbEvent);

    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
        &PreviewPlayer::onUpdateOverlayEvent);
    mProgressCbEventPending = false;

    mOverlayUpdateEventPending = false;
    mResizedVideoBuffer = NULL;
    mVideoResizedOrCropped = false;
    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
    mIsFiftiesEffectStarted = false;
    reset();
}

// Stops the event queue, tears down sources via reset(), and frees the
// resize buffer's payload (allocated with the M4OSA allocator).
PreviewPlayer::~PreviewPlayer() {

    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    if(mResizedVideoBuffer != NULL) {
        // Only the payload was M4OSA-allocated; the MediaBuffer wrapper
        // itself is not deleted here.
        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
        mResizedVideoBuffer = NULL;
    }

    mVideoRenderer.clear();
    mVideoRenderer = NULL;
}

// Cancels every queued player event and clears the matching pending flags.
// NOTE(review): keepBufferingGoing is unused in this override — presumably
// kept only to match the base-class signature; confirm before removing.
void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    mQueue.cancelEvent(mProgressCbEvent->eventID());
    mProgressCbEventPending = false;
}

// Public entry point: records the URI/headers under lock.
status_t PreviewPlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

// Lock-held variant: resets state and stores the URI; the expensive work
// is deferred to preparation (see comment below).
status_t PreviewPlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.
    return OK;
}

// Picks the first video and first audio track from the extractor. If the
// clip has no audio track, a DummyAudioSource (32 kHz stereo silence,
// lasting until mPlayEndTimeMsec) is substituted so A/V clocking still works.
status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                    && fileMeta->findInt32(kKeyAutoLoop, &loop)
                    && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    /* Add the support for Dummy audio*/
    if( !haveAudio ){
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");

        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                                               ((mPlayEndTimeMsec)*1000));
        LOGV("PreviewPlayer: setDataSource_l Dummyauiosource created");
        if(mAudioTrack != NULL) {
            haveAudio = true;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();
    return OK;
}

// Sets up playback for a still-image (JPEG) clip: dummy silent audio plus
// a DummyVideoSource that repeats the decoded image for the clip duration.
status_t PreviewPlayer::setDataSource_l_jpg() {
    M4OSA_ERR err = M4NO_ERROR;
    LOGV("PreviewPlayer: setDataSource_l_jpg started");

    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
                                            ((mPlayEndTimeMsec)*1000));
    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
    if(mAudioSource != NULL) {
        setAudioSource(mAudioSource);
    }
    status_t error = mAudioSource->start();
    if (error != OK) {
        LOGV("Error starting dummy audio source");
        mAudioSource.clear();
        // NOTE(review): returns 'err' which is still M4NO_ERROR (0/OK),
        // so this failure is silently swallowed by callers — 'error'
        // (or err1 below) was presumably intended. Flagged, not fixed,
        // to avoid changing caller-visible behavior in a doc pass.
        return err;
    }

    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;

    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
                                            mDurationUs, mUri);
    mReportedWidth = mVideoWidth;
    mReportedHeight = mVideoHeight;

    setVideoSource(mVideoSource);
    status_t err1 = mVideoSource->start();
    if (err1 != OK) {
        mVideoSource.clear();
        // NOTE(review): same issue — 'err' is always M4NO_ERROR here.
        return err;
    }

    mIsVideoSourceJpg = true;
    return OK;
}

// Public reset: takes the lock and delegates.
void PreviewPlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

// Full teardown to the initial stopped state. Must be called with mLock
// held. Waits out any in-flight prepare, cancels events, and releases
// sources and buffers in a deliberate order (see comments inline).
void PreviewPlayer::reset_l() {

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the respone to the reset request
    // appears to happen instantaneously as far as the user is concerned
    // If we did this later, audio would continue playing while we
    // shutdown the video-related resources and the player appear to
    // not be as responsive to a reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    //Single audio player instance used
    //So donot delete it here
    //It is deleted from PreviewController class
    //delete mAudioPlayer;
    mAudioPlayer = NULL;

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = NO_SEEK;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mIsVideoSourceJpg = false;
    mFrameRGBBuffer = NULL;
    if(mFrameYUVBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
        mFrameYUVBuffer = NULL;
    }
}

// Public play entry point: clears any cache-underrun pause and starts.
status_t PreviewPlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

// Starts or resumes the (VideoEditor) audio player. Caller must hold mLock
// and AUDIO_RUNNING must not already be set. No-op success when there is
// no audio source/player.
status_t PreviewPlayer::startAudioPlayer_l() {
    CHECK(!(mFlags & AUDIO_RUNNING));

    if (mAudioSource == NULL || mAudioPlayer == NULL) {
        return OK;
    }

    if (!(mFlags & AUDIOPLAYER_STARTED)) {
        mFlags |= AUDIOPLAYER_STARTED;

        // We've already started the MediaSource in order to enable
        // the prefetcher to read its data.
        status_t err = mVeAudioPlayer->start(
                true /* sourceAlreadyStarted */);

        if (err != OK) {
            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
            return err;
        }
    } else {
        mVeAudioPlayer->resume();
    }

    mFlags |= AUDIO_RUNNING;

    mWatchForAudioEOS = true;

    return OK;
}

// Installs an externally-owned audio player (shared across clips by the
// preview controller). If both the old and new sources are dummy (silent)
// sources, the existing source is kept and only its duration is extended,
// avoiding an audible restart; otherwise a source change is flagged for
// play_l().
status_t PreviewPlayer::setAudioPlayer(AudioPlayer *audioPlayer) {
    Mutex::Autolock autoLock(mLock);
    CHECK(!(mFlags & PLAYING));
    mAudioPlayer = audioPlayer;

    LOGV("SetAudioPlayer");
    mIsChangeSourceRequired = true;
    mVeAudioPlayer =
            (VideoEditorAudioPlayer*)mAudioPlayer;

    // check if the new and old source are dummy
    sp<MediaSource> anAudioSource = mVeAudioPlayer->getSource();
    if (anAudioSource == NULL) {
        // Audio player does not have any source set.
        LOGV("setAudioPlayer: Audio player does not have any source set");
        return OK;
    }

    const char *pSrcType1;
    const char *pSrcType2;
    sp<MetaData> meta = anAudioSource->getFormat();

    if (meta->findCString(kKeyDecoderComponent, &pSrcType1)) {
        if (strcmp(pSrcType1, "DummyAudioSource") == 0) {
            meta = mAudioSource->getFormat();
            if (meta->findCString(kKeyDecoderComponent, &pSrcType2)) {
                if (strcmp(pSrcType2, "DummyAudioSource") == 0) {
                    mIsChangeSourceRequired = false;
                    // Just set the new play duration for the existing source
                    MediaSource *pMediaSrc = anAudioSource.get();
                    DummyAudioSource *pDummyAudioSource = (DummyAudioSource*)pMediaSrc;
                    //Increment the duration of audio source
                    pDummyAudioSource->setDuration((int64_t)((mPlayEndTimeMsec)*1000));
                }
            }
        }
    }

    return OK;
}

// Posted whenever any stream finishes playing: reports errors, and on a
// clean EOS of both streams either loops or pauses + notifies completion.
void PreviewPlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.
    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        // Any status other than a normal EOS is treated as a media error.
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
        return;
    }

    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
        && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & (LOOPING | AUTO_LOOPING)) {
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        //pause before sending event
        pause_l(true /* at eos */);
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        mFlags |= AT_EOS;
    }
}


// Core start logic (mLock held): prepares if needed, creates or re-binds
// the VideoEditorAudioPlayer (applying mix settings and storyboard skim
// timestamps), kicks off the video event loop, and honors deferred seeks.
status_t PreviewPlayer::play_l() {

    mFlags &= ~SEEK_PREVIEW;

    if (mFlags & PLAYING) {
        return OK;
    }
    mStartNextPlayer = false;

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {

                // First clip: create the shared audio player ourselves.
                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
                mVeAudioPlayer =
                          (VideoEditorAudioPlayer*)mAudioPlayer;

                mAudioPlayer->setSource(mAudioSource);

                mVeAudioPlayer->setAudioMixSettings(
                 mPreviewPlayerAudioMixSettings);

                mVeAudioPlayer->setAudioMixPCMFileHandle(
                 mAudioMixPCMFileHandle);

                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                 mCurrentMediaVolumeValue);

                mFlags |= AUDIOPLAYER_STARTED;
                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mVeAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    //delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);
                    return err;
                }

                mTimeSource = mVeAudioPlayer;
                mFlags |= AUDIO_RUNNING;
                deferredAudioSeek = true;
                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            // Re-using the shared audio player installed via setAudioPlayer().
            mVeAudioPlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
            bool isAudioPlayerStarted = mVeAudioPlayer->isStarted();

            if (mIsChangeSourceRequired == true) {
                LOGV("play_l: Change audio source required");

                if (isAudioPlayerStarted == true) {
                    mVeAudioPlayer->pause();
                }

                mVeAudioPlayer->setSource(mAudioSource);
                mVeAudioPlayer->setObserver(this);

                mVeAudioPlayer->setAudioMixSettings(
                 mPreviewPlayerAudioMixSettings);

                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                    mCurrentMediaVolumeValue);

                if (isAudioPlayerStarted == true) {
                    mVeAudioPlayer->resume();
                } else {
                    status_t err = OK;
                    err = mVeAudioPlayer->start(true);
                    if (err != OK) {
                        mAudioPlayer = NULL;
                        mVeAudioPlayer = NULL;

                        mFlags &= ~(PLAYING | FIRST_FRAME);
                        return err;
                    }
                }
            } else {
                LOGV("play_l: No Source change required");
                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                    mCurrentMediaVolumeValue);

                mVeAudioPlayer->resume();
            }

            mFlags |= AUDIOPLAYER_STARTED;
            mFlags |= AUDIO_RUNNING;
            mTimeSource = mVeAudioPlayer;
            deferredAudioSeek = true;
            mWatchForAudioSeekComplete = false;
            mWatchForAudioEOS = true;
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    // Set the seek option for Image source files and read.
    // This resets the timestamping for image play
    if (mIsVideoSourceJpg) {
        MediaSource::ReadOptions options;
        MediaBuffer *aLocalBuffer;
        options.setSeekTo(mSeekTimeUs);
        mVideoSource->read(&aLocalBuffer, &options);
        aLocalBuffer->release();
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}


// Creates the local preview renderer (once) for the current video format.
// Requires a surface (or ISurface) to have been set; silently succeeds
// otherwise.
status_t PreviewPlayer::initRenderer_l() {
    if (mSurface != NULL || mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        // always use localrenderer since decoded buffers are modified
        // by postprocessing module
        // Other decoders are instantiated locally and as a consequence
        // allocate their buffers in local address space.
        if(mVideoRenderer == NULL) {

            mVideoRenderer = PreviewLocalRenderer:: initPreviewLocalRenderer (
                false,  // previewOnly
                (OMX_COLOR_FORMATTYPE)format,
                mSurface,
                mOutputVideoWidth, mOutputVideoHeight,
                mOutputVideoWidth, mOutputVideoHeight);

            if ( mVideoRenderer == NULL )
            {
                return UNKNOWN_ERROR;
            }
            return OK;
        }
    }
    return OK;
}


// Stores the legacy ISurface handle used as an alternative render target.
void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);
    mISurface = isurface;
}


// Seeks if the extractor supports it; image sources are always "seekable"
// (the seek merely restamps the repeated frame). Silently succeeds when
// seeking is unsupported.
status_t PreviewPlayer::seekTo(int64_t timeUs) {

    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}


// Returns the source video dimensions, or UNKNOWN_ERROR if they have not
// been discovered yet (still at the -1 sentinel from reset_l()).
status_t PreviewPlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}


// Instantiates the audio decode chain: raw PCM passes through untouched,
// anything else goes OMX decoder -> VideoEditorSRC (sample-rate converter).
// Also widens mDurationUs if the audio track is longer than currently known.
status_t PreviewPlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        sp<MediaSource> aRawSource;
        aRawSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);

        if(aRawSource != NULL) {
            LOGV("initAudioDecoder: new VideoEditorSRC");
            mAudioSource = new VideoEditorSRC(aRawSource);
        }
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}


// Instantiates the OMX video decoder for the video track, records the
// reported dimensions, widens mDurationUs if needed, and starts the source.
status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        mReportedWidth = mVideoWidth;
        mReportedHeight = mVideoHeight;

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}


// Per-frame video pump. Reads the next decoded frame, handles seeks,
// format changes and clip begin/end cut times, throttles against the audio
// (or system) clock, applies per-frame video effects and overlay (framing)
// notifications, renders, and re-posts itself until EOS.
void PreviewPlayer::onVideoEvent() {
    uint32_t i=0;
    bool bAppliedVideoEffect = false;
    M4OSA_ERR err1 = M4NO_ERROR;
    int64_t imageFrameTimeUs = 0;

    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mFlags & SEEK_PREVIEW) {
        mFlags &= ~SEEK_PREVIEW;
        return;
    }

    TimeSource *ts_st = &mSystemTimeSource;
    int64_t timeStartUs = ts_st->getRealTimeUs();

    if (mSeeking != NO_SEEK) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }


        if(mAudioSource != NULL) {

            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
                mAudioPlayer->pause();
                mFlags &= ~AUDIO_RUNNING;
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking != NO_SEEK) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                 mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        err = initRenderer_l();
                        if (err != OK) {
                            postStreamDoneEvent_l(err);
                        }

                    }
                    continue;
                }
                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied to the audio track
                if (mSeeking != NO_SEEK) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                mFlags |= AUDIO_AT_EOS;
                mOverlayUpdateEventPosted = false;
                postStreamDoneEvent_l(err);
                // Set the last decoded timestamp to duration
                mDecodedVideoTs = (mPlayEndTimeMsec*1000);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if (mSeeking != NO_SEEK) {
                if (videoTimeUs < mSeekTimeUs) {
                    // buffers are before seek time
                    // ignore them
                    mVideoBuffer->release();
                    mVideoBuffer = NULL;
                    continue;
                }
            } else {
                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                    // Frames are before begin cut time
                    // Donot render
                    mVideoBuffer->release();
                    mVideoBuffer = NULL;
                    continue;
                }
            }
            break;
        }
    }

    mNumberDecVideoFrames++;

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }


    if(!mStartNextPlayer) {
        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
        if(playbackTimeRemaining <= 1500000) {
            //When less than 1.5 sec of playback left
            // send notification to start next player

            mStartNextPlayer = true;
            // 0xAAAAAAAA: private message code understood by the preview
            // controller to pre-roll the next clip's player.
            notifyListener_l(0xAAAAAAAA);
        }
    }

    SeekType wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);
    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
        status_t err = startAudioPlayer_l();
        if (err != OK) {
            LOGE("Starting the audio player failed w/ err %d", err);
            return;
        }
    }

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if(ts == NULL) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        return;
    }

    // A/V sync: skipped entirely for image clips, which are paced at a
    // fixed ~30fps further below.
    if(!mIsVideoSourceJpg) {
        if (mFlags & FIRST_FRAME) {
            mFlags &= ~FIRST_FRAME;

            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
        }

        int64_t realTimeUs, mediaTimeUs;
        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
        }

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        if (wasSeeking != NO_SEEK) {
            // Let's display the first frame after seeking right away.
            latenessUs = 0;
        }
        LOGV("Audio time stamp = %lld and video time stamp = %lld",
             ts->getRealTimeUs(),timeUs);
        if (latenessUs > 40000) {
            // We're more than 40ms late.

            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                 latenessUs, latenessUs / 1E6);

            // Drop the frame and immediately try the next one.
            mVideoBuffer->release();
            mVideoBuffer = NULL;
            postVideoEvent_l(0);
            return;
        }

        if (latenessUs < -25000) {
            // We're more than 25ms early.
            LOGV("We're more than 25ms early, lateness %lld", latenessUs);

            // Keep the buffer and retry this same frame in 25ms.
            postVideoEvent_l(25000);
            return;
        }
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        status_t err = initRenderer_l();
        if (err != OK) {
            postStreamDoneEvent_l(err);
        }
    }

    // If timestamp exceeds endCutTime of clip, donot render
    if((timeUs/1000) > mPlayEndTimeMsec) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }
        mLastVideoBuffer = mVideoBuffer;
        mVideoBuffer = NULL;
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
        mOverlayUpdateEventPosted = false;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
        return;
    }
    // Capture the frame timestamp to be rendered
    mDecodedVideoTs = timeUs;

    // Post processing to apply video effects
    for(i=0;i<mNumberEffects;i++) {
        // First check if effect starttime matches the clip being previewed
        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
           (mEffectsSettings[i].uiStartTime >=
               ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
        {
            // This effect doesn't belong to this clip, check next one
            continue;
        }
        // Check if effect applies to this particular frame timestamp
        if((mEffectsSettings[i].uiStartTime <=
            (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
           ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
            (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
           && (mEffectsSettings[i].uiDuration != 0)) {
            setVideoPostProcessingNode(
                mEffectsSettings[i].VideoEffectType, TRUE);
        }
        else {
            setVideoPostProcessingNode(
                mEffectsSettings[i].VideoEffectType, FALSE);
        }
    }

    //Provide the overlay Update indication when there is an overlay effect
    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
        if (!mOverlayUpdateEventPosted) {
            // Find the effect in effectSettings array
            int index;
            for (index = 0; index < mNumberEffects; index++) {
                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
                if(mEffectsSettings[index].VideoEffectType ==
                    M4xVSS_kVideoEffectType_Framing) {
                    // +/-1ms slack to tolerate timestamp rounding.
                    if (((mEffectsSettings[index].uiStartTime + 1) <=
                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
                        ((mEffectsSettings[index].uiStartTime - 1 +
                            mEffectsSettings[index].uiDuration) >=
                            timeMs + timeOffset - mPlayBeginTimeMsec))
                    {
                        break;
                    }
                }
            }
            if (index < mNumberEffects) {
                mCurrFramingEffectIndex = index;
                mOverlayUpdateEventPosted = true;
                postOverlayUpdateEvent_l();
                LOGV("Framing index = %d", mCurrFramingEffectIndex);
            } else {
                LOGV("No framing effects found");
            }
        }

    } else if (mOverlayUpdateEventPosted) {
        //Post the event when the overlay is no more valid
        LOGV("Overlay is Done");
        mOverlayUpdateEventPosted = false;
        postOverlayUpdateEvent_l();
    }


    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
        err1 = doVideoPostProcessing();
        if(err1 != M4NO_ERROR) {
            LOGE("doVideoPostProcessing returned err");
            bAppliedVideoEffect = false;
        }
        else {
            bAppliedVideoEffect = true;
        }
    }
    else {
        bAppliedVideoEffect = false;
        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
            // No effects to be applied, but media rendering to be done
            err1 = doMediaRendering();
            if(err1 != M4NO_ERROR) {
                LOGE("doMediaRendering returned err");
                //Use original mVideoBuffer for rendering
                mVideoResizedOrCropped = false;
            }
        }
    }

    if (mVideoRenderer != NULL) {
        LOGV("mVideoRenderer CALL render()");
        mVideoRenderer->render();
    }

    // Retain the just-rendered buffer as mLastVideoBuffer (released on the
    // next pass) so its pixels stay valid while on screen.
    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    // Post progress callback based on callback interval set
    if(mNumberDecVideoFrames >= mProgressCbInterval) {
        postProgressCallbackEvent_l();
        mNumberDecVideoFrames = 0;  // reset counter
    }

    // if reached EndCutTime of clip, post EOS event
    if((timeUs/1000) >= mPlayEndTimeMsec) {
        LOGV("PreviewPlayer: onVideoEvent EOS.");
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        mOverlayUpdateEventPosted = false;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
    }
    else {
        if ((wasSeeking != NO_SEEK) && (mFlags & SEEK_PREVIEW)) {
            mFlags &= ~SEEK_PREVIEW;
            return;
        }

        if(!mIsVideoSourceJpg) {
            postVideoEvent_l(0);
        }
        else {
            // Image clips are paced at a fixed 33ms (~30fps).
            postVideoEvent_l(33000);
        }
    }
}

// Synchronous prepare: takes the lock and delegates.
status_t PreviewPlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

// Runs the async-prepare machinery but blocks until it completes, then
// returns the recorded result.
status_t PreviewPlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

// Queues the async prepare event; starts the event queue on first use.
// NOTE(review): SOURCE is truncated mid-return below — the remainder of
// this function lies outside the visible chunk.
status_t PreviewPlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new PreviewPlayerEvent(
            this, &PreviewPlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return
OK; 1298} 1299 1300status_t PreviewPlayer::finishSetDataSource_l() { 1301 sp<DataSource> dataSource; 1302 sp<MediaExtractor> extractor; 1303 1304 dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders); 1305 1306 if (dataSource == NULL) { 1307 return UNKNOWN_ERROR; 1308 } 1309 1310 //If file type is .rgb, then no need to check for Extractor 1311 int uriLen = strlen(mUri); 1312 int startOffset = uriLen - 4; 1313 if(!strncasecmp(mUri+startOffset, ".rgb", 4)) { 1314 extractor = NULL; 1315 } 1316 else { 1317 extractor = MediaExtractor::Create(dataSource, 1318 MEDIA_MIMETYPE_CONTAINER_MPEG4); 1319 } 1320 1321 if (extractor == NULL) { 1322 LOGV("PreviewPlayer::finishSetDataSource_l extractor == NULL"); 1323 return setDataSource_l_jpg(); 1324 } 1325 1326 return setDataSource_l(extractor); 1327} 1328 1329 1330// static 1331bool PreviewPlayer::ContinuePreparation(void *cookie) { 1332 PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie); 1333 1334 return (me->mFlags & PREPARE_CANCELLED) == 0; 1335} 1336 1337void PreviewPlayer::onPrepareAsyncEvent() { 1338 Mutex::Autolock autoLock(mLock); 1339 LOGV("onPrepareAsyncEvent"); 1340 1341 if (mFlags & PREPARE_CANCELLED) { 1342 LOGV("LV PLAYER prepare was cancelled before doing anything"); 1343 abortPrepare(UNKNOWN_ERROR); 1344 return; 1345 } 1346 1347 if (mUri.size() > 0) { 1348 status_t err = finishSetDataSource_l(); 1349 1350 if (err != OK) { 1351 abortPrepare(err); 1352 return; 1353 } 1354 } 1355 1356 if (mVideoTrack != NULL && mVideoSource == NULL) { 1357 status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly); 1358 1359 if (err != OK) { 1360 abortPrepare(err); 1361 return; 1362 } 1363 } 1364 1365 if (mAudioTrack != NULL && mAudioSource == NULL) { 1366 status_t err = initAudioDecoder(); 1367 1368 if (err != OK) { 1369 abortPrepare(err); 1370 return; 1371 } 1372 } 1373 finishAsyncPrepare_l(); 1374 1375} 1376 1377void PreviewPlayer::finishAsyncPrepare_l() { 1378 if (mIsAsyncPrepare) { 1379 if (mVideoSource 
== NULL) { 1380 LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 "); 1381 notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0); 1382 } else { 1383 LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE"); 1384 notifyVideoSize_l(); 1385 } 1386 LOGV("finishAsyncPrepare_l: MEDIA_PREPARED"); 1387 notifyListener_l(MEDIA_PREPARED); 1388 } 1389 1390 mPrepareResult = OK; 1391 mFlags &= ~(PREPARING|PREPARE_CANCELLED); 1392 mFlags |= PREPARED; 1393 mAsyncPrepareEvent = NULL; 1394 mPreparedCondition.broadcast(); 1395} 1396 1397status_t PreviewPlayer::suspend() { 1398 LOGV("suspend"); 1399 Mutex::Autolock autoLock(mLock); 1400 1401 if (mSuspensionState != NULL) { 1402 if (mLastVideoBuffer == NULL) { 1403 //go into here if video is suspended again 1404 //after resuming without being played between 1405 //them 1406 SuspensionState *state = mSuspensionState; 1407 mSuspensionState = NULL; 1408 reset_l(); 1409 mSuspensionState = state; 1410 return OK; 1411 } 1412 1413 delete mSuspensionState; 1414 mSuspensionState = NULL; 1415 } 1416 1417 if (mFlags & PREPARING) { 1418 mFlags |= PREPARE_CANCELLED; 1419 } 1420 1421 while (mFlags & PREPARING) { 1422 mPreparedCondition.wait(mLock); 1423 } 1424 1425 SuspensionState *state = new SuspensionState; 1426 state->mUri = mUri; 1427 state->mUriHeaders = mUriHeaders; 1428 state->mFileSource = mFileSource; 1429 1430 state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS); 1431 getPosition(&state->mPositionUs); 1432 1433 if (mLastVideoBuffer) { 1434 size_t size = mLastVideoBuffer->range_length(); 1435 if (size) { 1436 int32_t unreadable; 1437 if (!mLastVideoBuffer->meta_data()->findInt32( 1438 kKeyIsUnreadable, &unreadable) 1439 || unreadable == 0) { 1440 state->mLastVideoFrameSize = size; 1441 state->mLastVideoFrame = malloc(size); 1442 memcpy(state->mLastVideoFrame, 1443 (const uint8_t *)mLastVideoBuffer->data() 1444 + mLastVideoBuffer->range_offset(), 1445 size); 1446 1447 state->mVideoWidth = mVideoWidth; 1448 state->mVideoHeight = 
mVideoHeight; 1449 1450 sp<MetaData> meta = mVideoSource->getFormat(); 1451 CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat)); 1452 CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth)); 1453 CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight)); 1454 } else { 1455 LOGV("Unable to save last video frame, we have no access to " 1456 "the decoded video data."); 1457 } 1458 } 1459 } 1460 1461 reset_l(); 1462 1463 mSuspensionState = state; 1464 1465 return OK; 1466} 1467 1468status_t PreviewPlayer::resume() { 1469 LOGV("resume"); 1470 Mutex::Autolock autoLock(mLock); 1471 1472 if (mSuspensionState == NULL) { 1473 return INVALID_OPERATION; 1474 } 1475 1476 SuspensionState *state = mSuspensionState; 1477 mSuspensionState = NULL; 1478 1479 status_t err; 1480 if (state->mFileSource != NULL) { 1481 err = AwesomePlayer::setDataSource_l(state->mFileSource); 1482 1483 if (err == OK) { 1484 mFileSource = state->mFileSource; 1485 } 1486 } else { 1487 err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders); 1488 } 1489 1490 if (err != OK) { 1491 delete state; 1492 state = NULL; 1493 1494 return err; 1495 } 1496 1497 seekTo_l(state->mPositionUs); 1498 1499 mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS); 1500 1501 if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) { 1502 mVideoRenderer = 1503 PreviewLocalRenderer::initPreviewLocalRenderer( 1504 true, // previewOnly 1505 (OMX_COLOR_FORMATTYPE)state->mColorFormat, 1506 mSurface, 1507 state->mVideoWidth, 1508 state->mVideoHeight, 1509 state->mDecodedWidth, 1510 state->mDecodedHeight); 1511 1512 mVideoRendererIsPreview = true; 1513 1514 ((PreviewLocalRenderer *)mVideoRenderer.get())->render( 1515 state->mLastVideoFrame, state->mLastVideoFrameSize); 1516 } 1517 1518 if (state->mFlags & PLAYING) { 1519 play_l(); 1520 } 1521 1522 mSuspensionState = state; 1523 state = NULL; 1524 1525 return OK; 1526} 1527 1528 1529status_t PreviewPlayer::loadEffectsSettings( 1530 
M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) { 1531 M4OSA_UInt32 i = 0, rgbSize = 0; 1532 M4VIFI_UInt8 *tmp = M4OSA_NULL; 1533 1534 mNumberEffects = nEffects; 1535 mEffectsSettings = pEffectSettings; 1536 return OK; 1537} 1538 1539status_t PreviewPlayer::loadAudioMixSettings( 1540 M4xVSS_AudioMixingSettings* pAudioMixSettings) { 1541 1542 LOGV("PreviewPlayer: loadAudioMixSettings: "); 1543 mPreviewPlayerAudioMixSettings = pAudioMixSettings; 1544 return OK; 1545} 1546 1547status_t PreviewPlayer::setAudioMixPCMFileHandle( 1548 M4OSA_Context pAudioMixPCMFileHandle) { 1549 1550 LOGV("PreviewPlayer: setAudioMixPCMFileHandle: "); 1551 mAudioMixPCMFileHandle = pAudioMixPCMFileHandle; 1552 return OK; 1553} 1554 1555status_t PreviewPlayer::setAudioMixStoryBoardParam( 1556 M4OSA_UInt32 audioMixStoryBoardTS, 1557 M4OSA_UInt32 currentMediaBeginCutTime, 1558 M4OSA_UInt32 primaryTrackVolValue ) { 1559 1560 mAudioMixStoryBoardTS = audioMixStoryBoardTS; 1561 mCurrentMediaBeginCutTime = currentMediaBeginCutTime; 1562 mCurrentMediaVolumeValue = primaryTrackVolValue; 1563 return OK; 1564} 1565 1566status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) { 1567 1568 mPlayBeginTimeMsec = msec; 1569 return OK; 1570} 1571 1572status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) { 1573 1574 mPlayEndTimeMsec = msec; 1575 return OK; 1576} 1577 1578status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) { 1579 1580 mStoryboardStartTimeMsec = msec; 1581 mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000; 1582 return OK; 1583} 1584 1585status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) { 1586 1587 mProgressCbInterval = cbInterval; 1588 return OK; 1589} 1590 1591 1592status_t PreviewPlayer::setMediaRenderingMode( 1593 M4xVSS_MediaRendering mode, 1594 M4VIDEOEDITING_VideoFrameSize outputVideoSize) { 1595 1596 mRenderingMode = mode; 1597 1598 /* reset boolean for each clip*/ 1599 mVideoResizedOrCropped = false; 1600 1601 switch(outputVideoSize) { 
1602 case M4VIDEOEDITING_kSQCIF: 1603 mOutputVideoWidth = 128; 1604 mOutputVideoHeight = 96; 1605 break; 1606 1607 case M4VIDEOEDITING_kQQVGA: 1608 mOutputVideoWidth = 160; 1609 mOutputVideoHeight = 120; 1610 break; 1611 1612 case M4VIDEOEDITING_kQCIF: 1613 mOutputVideoWidth = 176; 1614 mOutputVideoHeight = 144; 1615 break; 1616 1617 case M4VIDEOEDITING_kQVGA: 1618 mOutputVideoWidth = 320; 1619 mOutputVideoHeight = 240; 1620 break; 1621 1622 case M4VIDEOEDITING_kCIF: 1623 mOutputVideoWidth = 352; 1624 mOutputVideoHeight = 288; 1625 break; 1626 1627 case M4VIDEOEDITING_kVGA: 1628 mOutputVideoWidth = 640; 1629 mOutputVideoHeight = 480; 1630 break; 1631 1632 case M4VIDEOEDITING_kWVGA: 1633 mOutputVideoWidth = 800; 1634 mOutputVideoHeight = 480; 1635 break; 1636 1637 case M4VIDEOEDITING_kNTSC: 1638 mOutputVideoWidth = 720; 1639 mOutputVideoHeight = 480; 1640 break; 1641 1642 case M4VIDEOEDITING_k640_360: 1643 mOutputVideoWidth = 640; 1644 mOutputVideoHeight = 360; 1645 break; 1646 1647 case M4VIDEOEDITING_k854_480: 1648 mOutputVideoWidth = 854; 1649 mOutputVideoHeight = 480; 1650 break; 1651 1652 case M4VIDEOEDITING_kHD1280: 1653 mOutputVideoWidth = 1280; 1654 mOutputVideoHeight = 720; 1655 break; 1656 1657 case M4VIDEOEDITING_kHD1080: 1658 mOutputVideoWidth = 1080; 1659 mOutputVideoHeight = 720; 1660 break; 1661 1662 case M4VIDEOEDITING_kHD960: 1663 mOutputVideoWidth = 960; 1664 mOutputVideoHeight = 720; 1665 break; 1666 1667 default: 1668 LOGE("unsupported output video size set"); 1669 return BAD_VALUE; 1670 } 1671 1672 return OK; 1673} 1674 1675M4OSA_ERR PreviewPlayer::doMediaRendering() { 1676 M4OSA_ERR err = M4NO_ERROR; 1677 M4VIFI_ImagePlane planeIn[3], planeOut[3]; 1678 M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL; 1679 M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL; 1680 size_t videoBufferSize = 0; 1681 M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0; 1682 int32_t colorFormat = 0; 1683 1684 if(!mIsVideoSourceJpg) { 
1685 sp<MetaData> meta = mVideoSource->getFormat(); 1686 CHECK(meta->findInt32(kKeyColorFormat, &colorFormat)); 1687 } 1688 else { 1689 colorFormat = OMX_COLOR_FormatYUV420Planar; 1690 } 1691 1692 videoBufferSize = mVideoBuffer->size(); 1693 frameSize = (mVideoWidth*mVideoHeight*3) >> 1; 1694 1695 uint8_t* outBuffer; 1696 size_t outBufferStride = 0; 1697 1698 mVideoRenderer->getBuffer(&outBuffer, &outBufferStride); 1699 1700 bufferOffset = index*frameSize; 1701 inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+ 1702 mVideoBuffer->range_offset()+bufferOffset; 1703 1704 1705 /* In plane*/ 1706 prepareYUV420ImagePlane(planeIn, mVideoWidth, 1707 mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight); 1708 1709 // Set the output YUV420 plane to be compatible with YV12 format 1710 // W & H even 1711 // YVU instead of YUV 1712 // align buffers on 32 bits 1713 1714 //In YV12 format, sizes must be even 1715 M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1; 1716 M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1; 1717 1718 prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight, 1719 (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer); 1720 1721 1722 err = applyRenderingMode(planeIn, planeOut, mRenderingMode); 1723 1724 if(err != M4NO_ERROR) 1725 { 1726 LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err); 1727 return err; 1728 } 1729 mVideoResizedOrCropped = true; 1730 1731 return err; 1732} 1733 1734status_t PreviewPlayer::resetJniCallbackTimeStamp() { 1735 1736 mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000; 1737 return OK; 1738} 1739 1740void PreviewPlayer::postProgressCallbackEvent_l() { 1741 if (mProgressCbEventPending) { 1742 return; 1743 } 1744 mProgressCbEventPending = true; 1745 1746 mQueue.postEvent(mProgressCbEvent); 1747} 1748 1749 1750void PreviewPlayer::onProgressCbEvent() { 1751 Mutex::Autolock autoLock(mLock); 1752 if (!mProgressCbEventPending) { 1753 return; 1754 } 1755 
mProgressCbEventPending = false; 1756 // If playback starts from previous I-frame, 1757 // then send frame storyboard duration 1758 if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) { 1759 notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000); 1760 } 1761 else { 1762 notifyListener_l(MEDIA_INFO, 0, 1763 (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)); 1764 } 1765} 1766 1767void PreviewPlayer::postOverlayUpdateEvent_l() { 1768 if (mOverlayUpdateEventPending) { 1769 return; 1770 } 1771 mOverlayUpdateEventPending = true; 1772 mQueue.postEvent(mOverlayUpdateEvent); 1773} 1774 1775void PreviewPlayer::onUpdateOverlayEvent() { 1776 Mutex::Autolock autoLock(mLock); 1777 1778 if (!mOverlayUpdateEventPending) { 1779 return; 1780 } 1781 mOverlayUpdateEventPending = false; 1782 1783 int updateState; 1784 if (mOverlayUpdateEventPosted) { 1785 updateState = 1; 1786 } else { 1787 updateState = 0; 1788 } 1789 notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex); 1790} 1791 1792 1793void PreviewPlayer::setVideoPostProcessingNode( 1794 M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) { 1795 1796 uint32_t effect = VIDEO_EFFECT_NONE; 1797 1798 //Map M4VSS3GPP_VideoEffectType to local enum 1799 switch(type) { 1800 case M4VSS3GPP_kVideoEffectType_FadeFromBlack: 1801 effect = VIDEO_EFFECT_FADEFROMBLACK; 1802 break; 1803 1804 case M4VSS3GPP_kVideoEffectType_FadeToBlack: 1805 effect = VIDEO_EFFECT_FADETOBLACK; 1806 break; 1807 1808 case M4VSS3GPP_kVideoEffectType_CurtainOpening: 1809 effect = VIDEO_EFFECT_CURTAINOPEN; 1810 break; 1811 1812 case M4VSS3GPP_kVideoEffectType_CurtainClosing: 1813 effect = VIDEO_EFFECT_CURTAINCLOSE; 1814 break; 1815 1816 case M4xVSS_kVideoEffectType_BlackAndWhite: 1817 effect = VIDEO_EFFECT_BLACKANDWHITE; 1818 break; 1819 1820 case M4xVSS_kVideoEffectType_Pink: 1821 effect = VIDEO_EFFECT_PINK; 1822 break; 1823 1824 case M4xVSS_kVideoEffectType_Green: 1825 effect = VIDEO_EFFECT_GREEN; 1826 break; 1827 1828 case 
M4xVSS_kVideoEffectType_Sepia: 1829 effect = VIDEO_EFFECT_SEPIA; 1830 break; 1831 1832 case M4xVSS_kVideoEffectType_Negative: 1833 effect = VIDEO_EFFECT_NEGATIVE; 1834 break; 1835 1836 case M4xVSS_kVideoEffectType_Framing: 1837 effect = VIDEO_EFFECT_FRAMING; 1838 break; 1839 1840 case M4xVSS_kVideoEffectType_Fifties: 1841 effect = VIDEO_EFFECT_FIFTIES; 1842 break; 1843 1844 case M4xVSS_kVideoEffectType_ColorRGB16: 1845 effect = VIDEO_EFFECT_COLOR_RGB16; 1846 break; 1847 1848 case M4xVSS_kVideoEffectType_Gradient: 1849 effect = VIDEO_EFFECT_GRADIENT; 1850 break; 1851 1852 default: 1853 effect = VIDEO_EFFECT_NONE; 1854 break; 1855 } 1856 1857 if(enable == M4OSA_TRUE) { 1858 //If already set, then no need to set again 1859 if(!(mCurrentVideoEffect & effect)) { 1860 mCurrentVideoEffect |= effect; 1861 if(effect == VIDEO_EFFECT_FIFTIES) { 1862 mIsFiftiesEffectStarted = true; 1863 } 1864 } 1865 } 1866 else { 1867 //Reset only if already set 1868 if(mCurrentVideoEffect & effect) { 1869 mCurrentVideoEffect &= ~effect; 1870 } 1871 } 1872} 1873 1874status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) { 1875 mVideoWidth = width; 1876 mVideoHeight = height; 1877 return OK; 1878} 1879 1880 1881M4OSA_ERR PreviewPlayer::doVideoPostProcessing() { 1882 M4OSA_ERR err = M4NO_ERROR; 1883 vePostProcessParams postProcessParams; 1884 int32_t colorFormat = 0; 1885 1886 1887 if(!mIsVideoSourceJpg) { 1888 sp<MetaData> meta = mVideoSource->getFormat(); 1889 CHECK(meta->findInt32(kKeyColorFormat, &colorFormat)); 1890 } 1891 else { 1892 colorFormat = OMX_COLOR_FormatYUV420Planar; 1893 } 1894 1895 if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) || 1896 (colorFormat == 0x7FA30C00)) { 1897 LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported"); 1898 return M4ERR_UNSUPPORTED_MEDIA_TYPE; 1899 } 1900 1901 postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data() 1902 + mVideoBuffer->range_offset(); 1903 1904 postProcessParams.videoWidth = 
mVideoWidth; 1905 postProcessParams.videoHeight = mVideoHeight; 1906 postProcessParams.timeMs = mDecodedVideoTs/1000; 1907 postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000; 1908 postProcessParams.effectsSettings = mEffectsSettings; 1909 postProcessParams.numberEffects = mNumberEffects; 1910 postProcessParams.outVideoWidth = mOutputVideoWidth; 1911 postProcessParams.outVideoHeight = mOutputVideoHeight; 1912 postProcessParams.currentVideoEffect = mCurrentVideoEffect; 1913 postProcessParams.renderingMode = mRenderingMode; 1914 if(mIsFiftiesEffectStarted == M4OSA_TRUE) { 1915 postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE; 1916 mIsFiftiesEffectStarted = M4OSA_FALSE; 1917 } 1918 else { 1919 postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE; 1920 } 1921 1922 postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer; 1923 postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer; 1924 mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride)); 1925 err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight); 1926 1927 return err; 1928} 1929 1930status_t PreviewPlayer::readFirstVideoFrame() { 1931 LOGV("PreviewPlayer::readFirstVideoFrame"); 1932 1933 if (!mVideoBuffer) { 1934 MediaSource::ReadOptions options; 1935 if (mSeeking != NO_SEEK) { 1936 LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs, 1937 mSeekTimeUs / 1E6); 1938 1939 options.setSeekTo( 1940 mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST); 1941 } 1942 for (;;) { 1943 status_t err = mVideoSource->read(&mVideoBuffer, &options); 1944 options.clearSeekTo(); 1945 1946 if (err != OK) { 1947 CHECK_EQ(mVideoBuffer, NULL); 1948 1949 if (err == INFO_FORMAT_CHANGED) { 1950 LOGV("LV PLAYER VideoSource signalled format change"); 1951 notifyVideoSize_l(); 1952 sp<MetaData> meta = mVideoSource->getFormat(); 1953 1954 CHECK(meta->findInt32(kKeyWidth, &mReportedWidth)); 1955 CHECK(meta->findInt32(kKeyHeight, 
&mReportedHeight)); 1956 1957 if (mVideoRenderer != NULL) { 1958 mVideoRendererIsPreview = false; 1959 err = initRenderer_l(); 1960 if (err != OK) { 1961 postStreamDoneEvent_l(err); 1962 } 1963 } 1964 continue; 1965 } 1966 LOGV("PreviewPlayer: onVideoEvent EOS reached."); 1967 mFlags |= VIDEO_AT_EOS; 1968 mFlags |= AUDIO_AT_EOS; 1969 postStreamDoneEvent_l(err); 1970 return OK; 1971 } 1972 1973 if (mVideoBuffer->range_length() == 0) { 1974 // Some decoders, notably the PV AVC software decoder 1975 // return spurious empty buffers that we just want to ignore. 1976 1977 mVideoBuffer->release(); 1978 mVideoBuffer = NULL; 1979 continue; 1980 } 1981 1982 int64_t videoTimeUs; 1983 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs)); 1984 if (mSeeking != NO_SEEK) { 1985 if (videoTimeUs < mSeekTimeUs) { 1986 // buffers are before seek time 1987 // ignore them 1988 mVideoBuffer->release(); 1989 mVideoBuffer = NULL; 1990 continue; 1991 } 1992 } else { 1993 if((videoTimeUs/1000) < mPlayBeginTimeMsec) { 1994 // buffers are before begin cut time 1995 // ignore them 1996 mVideoBuffer->release(); 1997 mVideoBuffer = NULL; 1998 continue; 1999 } 2000 } 2001 break; 2002 } 2003 } 2004 2005 int64_t timeUs; 2006 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)); 2007 2008 { 2009 Mutex::Autolock autoLock(mMiscStateLock); 2010 mVideoTimeUs = timeUs; 2011 } 2012 2013 mDecodedVideoTs = timeUs; 2014 2015 return OK; 2016 2017} 2018 2019status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) { 2020 *lastRenderedTimeMs = (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec); 2021 return OK; 2022} 2023 2024} // namespace android 2025