PreviewPlayer.cpp revision 5bc7fb407ce1bab13d4a4a67d34a1a3192ee3186
1/* 2 * Copyright (C) 2011 NXP Software 3 * Copyright (C) 2011 The Android Open Source Project 4 * 5 * Licensed under the Apache License, Version 2.0 (the "License"); 6 * you may not use this file except in compliance with the License. 7 * You may obtain a copy of the License at 8 * 9 * http://www.apache.org/licenses/LICENSE-2.0 10 * 11 * Unless required by applicable law or agreed to in writing, software 12 * distributed under the License is distributed on an "AS IS" BASIS, 13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 * See the License for the specific language governing permissions and 15 * limitations under the License. 16 */ 17 18 19#define LOG_NDEBUG 1 20#define LOG_TAG "PreviewPlayer" 21#include <utils/Log.h> 22 23#include <dlfcn.h> 24 25#include "include/ARTSPController.h" 26#include "PreviewPlayer.h" 27#include "DummyAudioSource.h" 28#include "DummyVideoSource.h" 29#include "VideoEditorSRC.h" 30#include "include/LiveSession.h" 31#include "include/NuCachedSource2.h" 32#include "include/ThrottledSource.h" 33 34 35#include "PreviewRenderer.h" 36 37#include <binder/IPCThreadState.h> 38#include <media/stagefright/DataSource.h> 39#include <media/stagefright/FileSource.h> 40#include <media/stagefright/MediaBuffer.h> 41#include <media/stagefright/MediaDefs.h> 42#include <media/stagefright/MediaExtractor.h> 43#include <media/stagefright/MediaDebug.h> 44#include <media/stagefright/MediaSource.h> 45#include <media/stagefright/MetaData.h> 46#include <media/stagefright/OMXCodec.h> 47 48#include <surfaceflinger/Surface.h> 49#include <media/stagefright/foundation/ALooper.h> 50 51namespace android { 52 53 54struct PreviewPlayerEvent : public TimedEventQueue::Event { 55 PreviewPlayerEvent( 56 PreviewPlayer *player, 57 void (PreviewPlayer::*method)()) 58 : mPlayer(player), 59 mMethod(method) { 60 } 61 62protected: 63 virtual ~PreviewPlayerEvent() {} 64 65 virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) { 66 
(mPlayer->*mMethod)(); 67 } 68 69private: 70 PreviewPlayer *mPlayer; 71 void (PreviewPlayer::*mMethod)(); 72 73 PreviewPlayerEvent(const PreviewPlayerEvent &); 74 PreviewPlayerEvent &operator=(const PreviewPlayerEvent &); 75}; 76 77 78struct PreviewLocalRenderer : public PreviewPlayerRenderer { 79 80 static PreviewLocalRenderer* initPreviewLocalRenderer ( 81 bool previewOnly, 82 OMX_COLOR_FORMATTYPE colorFormat, 83 const sp<Surface> &surface, 84 size_t displayWidth, size_t displayHeight, 85 size_t decodedWidth, size_t decodedHeight, 86 int32_t rotationDegrees = 0) 87 { 88 PreviewLocalRenderer* mLocalRenderer = new 89 PreviewLocalRenderer( 90 previewOnly, 91 colorFormat, 92 surface, 93 displayWidth, displayHeight, 94 decodedWidth, decodedHeight, 95 rotationDegrees); 96 97 if ( mLocalRenderer->init(previewOnly, 98 colorFormat, surface, 99 displayWidth, displayHeight, 100 decodedWidth, decodedHeight, 101 rotationDegrees) != OK ) 102 { 103 delete mLocalRenderer; 104 return NULL; 105 } 106 return mLocalRenderer; 107 } 108 109 virtual void render(MediaBuffer *buffer) { 110 render((const uint8_t *)buffer->data() + buffer->range_offset(), 111 buffer->range_length()); 112 } 113 114 void render(const void *data, size_t size) { 115 mTarget->render(data, size, NULL); 116 } 117 void render() { 118 mTarget->renderYV12(); 119 } 120 void getBuffer(uint8_t **data, size_t *stride) { 121 mTarget->getBufferYV12(data, stride); 122 } 123 124protected: 125 virtual ~PreviewLocalRenderer() { 126 delete mTarget; 127 mTarget = NULL; 128 } 129 130private: 131 PreviewRenderer *mTarget; 132 133 PreviewLocalRenderer( 134 bool previewOnly, 135 OMX_COLOR_FORMATTYPE colorFormat, 136 const sp<Surface> &surface, 137 size_t displayWidth, size_t displayHeight, 138 size_t decodedWidth, size_t decodedHeight, 139 int32_t rotationDegrees = 0) 140 : mTarget(NULL) { 141 } 142 143 144 int init( 145 bool previewOnly, 146 OMX_COLOR_FORMATTYPE colorFormat, 147 const sp<Surface> &surface, 148 size_t displayWidth, 
size_t displayHeight, 149 size_t decodedWidth, size_t decodedHeight, 150 int32_t rotationDegrees = 0); 151 152 PreviewLocalRenderer(const PreviewLocalRenderer &); 153 PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);; 154}; 155 156int PreviewLocalRenderer::init( 157 bool previewOnly, 158 OMX_COLOR_FORMATTYPE colorFormat, 159 const sp<Surface> &surface, 160 size_t displayWidth, size_t displayHeight, 161 size_t decodedWidth, size_t decodedHeight, 162 int32_t rotationDegrees) { 163 164 mTarget = PreviewRenderer::CreatePreviewRenderer ( 165 colorFormat, surface, displayWidth, displayHeight, 166 decodedWidth, decodedHeight, rotationDegrees); 167 if (mTarget == M4OSA_NULL) { 168 return UNKNOWN_ERROR; 169 } 170 return OK; 171} 172 173PreviewPlayer::PreviewPlayer() 174 : AwesomePlayer(), 175 mFrameRGBBuffer(NULL), 176 mFrameYUVBuffer(NULL), 177 mReportedWidth(0), 178 mReportedHeight(0), 179 mCurrFramingEffectIndex(0) { 180 181 mVideoRenderer = NULL; 182 mLastVideoBuffer = NULL; 183 mSuspensionState = NULL; 184 mEffectsSettings = NULL; 185 mVeAudioPlayer = NULL; 186 mAudioMixStoryBoardTS = 0; 187 mCurrentMediaBeginCutTime = 0; 188 mCurrentMediaVolumeValue = 0; 189 mNumberEffects = 0; 190 mDecodedVideoTs = 0; 191 mDecVideoTsStoryBoard = 0; 192 mCurrentVideoEffect = VIDEO_EFFECT_NONE; 193 mProgressCbInterval = 0; 194 mNumberDecVideoFrames = 0; 195 mOverlayUpdateEventPosted = false; 196 197 mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent); 198 mVideoEventPending = false; 199 mStreamDoneEvent = new PreviewPlayerEvent(this, 200 &AwesomePlayer::onStreamDone); 201 202 mStreamDoneEventPending = false; 203 204 mCheckAudioStatusEvent = new PreviewPlayerEvent( 205 this, &AwesomePlayer::onCheckAudioStatus); 206 207 mAudioStatusEventPending = false; 208 209 mProgressCbEvent = new PreviewPlayerEvent(this, 210 &PreviewPlayer::onProgressCbEvent); 211 212 mOverlayUpdateEvent = new PreviewPlayerEvent(this, 213 &PreviewPlayer::onUpdateOverlayEvent); 214 
mProgressCbEventPending = false; 215 216 mOverlayUpdateEventPending = false; 217 mResizedVideoBuffer = NULL; 218 mVideoResizedOrCropped = false; 219 mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID; 220 mIsFiftiesEffectStarted = false; 221 reset(); 222} 223 224PreviewPlayer::~PreviewPlayer() { 225 226 if (mQueueStarted) { 227 mQueue.stop(); 228 } 229 230 reset(); 231 232 if(mResizedVideoBuffer != NULL) { 233 M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data())); 234 mResizedVideoBuffer = NULL; 235 } 236 237 mVideoRenderer.clear(); 238 mVideoRenderer = NULL; 239} 240 241void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) { 242 mQueue.cancelEvent(mVideoEvent->eventID()); 243 mVideoEventPending = false; 244 mQueue.cancelEvent(mStreamDoneEvent->eventID()); 245 mStreamDoneEventPending = false; 246 mQueue.cancelEvent(mCheckAudioStatusEvent->eventID()); 247 mAudioStatusEventPending = false; 248 249 mQueue.cancelEvent(mProgressCbEvent->eventID()); 250 mProgressCbEventPending = false; 251} 252 253status_t PreviewPlayer::setDataSource( 254 const char *uri, const KeyedVector<String8, String8> *headers) { 255 Mutex::Autolock autoLock(mLock); 256 return setDataSource_l(uri, headers); 257} 258 259status_t PreviewPlayer::setDataSource_l( 260 const char *uri, const KeyedVector<String8, String8> *headers) { 261 reset_l(); 262 263 mUri = uri; 264 265 if (headers) { 266 mUriHeaders = *headers; 267 } 268 269 // The actual work will be done during preparation in the call to 270 // ::finishSetDataSource_l to avoid blocking the calling thread in 271 // setDataSource for any significant time. 
272 return OK; 273} 274 275status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) { 276 bool haveAudio = false; 277 bool haveVideo = false; 278 for (size_t i = 0; i < extractor->countTracks(); ++i) { 279 sp<MetaData> meta = extractor->getTrackMetaData(i); 280 281 const char *mime; 282 CHECK(meta->findCString(kKeyMIMEType, &mime)); 283 284 if (!haveVideo && !strncasecmp(mime, "video/", 6)) { 285 setVideoSource(extractor->getTrack(i)); 286 haveVideo = true; 287 } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) { 288 setAudioSource(extractor->getTrack(i)); 289 haveAudio = true; 290 291 if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) { 292 // Only do this for vorbis audio, none of the other audio 293 // formats even support this ringtone specific hack and 294 // retrieving the metadata on some extractors may turn out 295 // to be very expensive. 296 sp<MetaData> fileMeta = extractor->getMetaData(); 297 int32_t loop; 298 if (fileMeta != NULL 299 && fileMeta->findInt32(kKeyAutoLoop, &loop) 300 && loop != 0) { 301 mFlags |= AUTO_LOOPING; 302 } 303 } 304 } 305 306 if (haveAudio && haveVideo) { 307 break; 308 } 309 } 310 311 /* Add the support for Dummy audio*/ 312 if( !haveAudio ){ 313 LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started"); 314 315 mAudioTrack = DummyAudioSource::Create(32000, 2, 20000, 316 ((mPlayEndTimeMsec)*1000)); 317 LOGV("PreviewPlayer: setDataSource_l Dummyauiosource created"); 318 if(mAudioTrack != NULL) { 319 haveAudio = true; 320 } 321 } 322 323 if (!haveAudio && !haveVideo) { 324 return UNKNOWN_ERROR; 325 } 326 327 mExtractorFlags = extractor->flags(); 328 return OK; 329} 330 331status_t PreviewPlayer::setDataSource_l_jpg() { 332 M4OSA_ERR err = M4NO_ERROR; 333 LOGV("PreviewPlayer: setDataSource_l_jpg started"); 334 335 mAudioSource = DummyAudioSource::Create(32000, 2, 20000, 336 ((mPlayEndTimeMsec)*1000)); 337 LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created"); 338 if(mAudioSource != 
NULL) { 339 setAudioSource(mAudioSource); 340 } 341 status_t error = mAudioSource->start(); 342 if (error != OK) { 343 LOGV("Error starting dummy audio source"); 344 mAudioSource.clear(); 345 return err; 346 } 347 348 mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000; 349 350 mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight, 351 mDurationUs, mUri); 352 mReportedWidth = mVideoWidth; 353 mReportedHeight = mVideoHeight; 354 355 setVideoSource(mVideoSource); 356 status_t err1 = mVideoSource->start(); 357 if (err1 != OK) { 358 mVideoSource.clear(); 359 return err; 360 } 361 362 mIsVideoSourceJpg = true; 363 return OK; 364} 365 366void PreviewPlayer::reset() { 367 Mutex::Autolock autoLock(mLock); 368 reset_l(); 369} 370 371void PreviewPlayer::reset_l() { 372 373 if (mFlags & PREPARING) { 374 mFlags |= PREPARE_CANCELLED; 375 } 376 377 while (mFlags & PREPARING) { 378 mPreparedCondition.wait(mLock); 379 } 380 381 cancelPlayerEvents(); 382 mAudioTrack.clear(); 383 mVideoTrack.clear(); 384 385 // Shutdown audio first, so that the respone to the reset request 386 // appears to happen instantaneously as far as the user is concerned 387 // If we did this later, audio would continue playing while we 388 // shutdown the video-related resources and the player appear to 389 // not be as responsive to a reset request. 390 if (mAudioPlayer == NULL && mAudioSource != NULL) { 391 // If we had an audio player, it would have effectively 392 // taken possession of the audio source and stopped it when 393 // _it_ is stopped. Otherwise this is still our responsibility. 
394 mAudioSource->stop(); 395 } 396 mAudioSource.clear(); 397 398 mTimeSource = NULL; 399 400 delete mAudioPlayer; 401 mAudioPlayer = NULL; 402 403 if (mLastVideoBuffer) { 404 mLastVideoBuffer->release(); 405 mLastVideoBuffer = NULL; 406 } 407 408 if (mVideoBuffer) { 409 mVideoBuffer->release(); 410 mVideoBuffer = NULL; 411 } 412 413 if (mVideoSource != NULL) { 414 mVideoSource->stop(); 415 416 // The following hack is necessary to ensure that the OMX 417 // component is completely released by the time we may try 418 // to instantiate it again. 419 wp<MediaSource> tmp = mVideoSource; 420 mVideoSource.clear(); 421 while (tmp.promote() != NULL) { 422 usleep(1000); 423 } 424 IPCThreadState::self()->flushCommands(); 425 } 426 427 mDurationUs = -1; 428 mFlags = 0; 429 mExtractorFlags = 0; 430 mVideoWidth = mVideoHeight = -1; 431 mTimeSourceDeltaUs = 0; 432 mVideoTimeUs = 0; 433 434 mSeeking = false; 435 mSeekNotificationSent = false; 436 mSeekTimeUs = 0; 437 438 mUri.setTo(""); 439 mUriHeaders.clear(); 440 441 mFileSource.clear(); 442 443 delete mSuspensionState; 444 mSuspensionState = NULL; 445 446 mCurrentVideoEffect = VIDEO_EFFECT_NONE; 447 mIsVideoSourceJpg = false; 448 mFrameRGBBuffer = NULL; 449 if(mFrameYUVBuffer != NULL) { 450 M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer); 451 mFrameYUVBuffer = NULL; 452 } 453} 454 455void PreviewPlayer::partial_reset_l() { 456 457 if (mLastVideoBuffer) { 458 mLastVideoBuffer->release(); 459 mLastVideoBuffer = NULL; 460 } 461 462 /* call base struct */ 463 AwesomePlayer::partial_reset_l(); 464 465} 466 467status_t PreviewPlayer::play() { 468 Mutex::Autolock autoLock(mLock); 469 470 mFlags &= ~CACHE_UNDERRUN; 471 472 return play_l(); 473} 474 475status_t PreviewPlayer::startAudioPlayer_l() { 476 CHECK(!(mFlags & AUDIO_RUNNING)); 477 478 if (mAudioSource == NULL || mAudioPlayer == NULL) { 479 return OK; 480 } 481 482 if (!(mFlags & AUDIOPLAYER_STARTED)) { 483 mFlags |= AUDIOPLAYER_STARTED; 484 485 // We've already started the 
MediaSource in order to enable 486 // the prefetcher to read its data. 487 status_t err = mVeAudioPlayer->start( 488 true /* sourceAlreadyStarted */); 489 490 if (err != OK) { 491 notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); 492 return err; 493 } 494 } else { 495 mVeAudioPlayer->resume(); 496 } 497 498 mFlags |= AUDIO_RUNNING; 499 500 mWatchForAudioEOS = true; 501 502 return OK; 503} 504 505status_t PreviewPlayer::play_l() { 506 507 if (mFlags & PLAYING) { 508 return OK; 509 } 510 mStartNextPlayer = false; 511 512 if (!(mFlags & PREPARED)) { 513 status_t err = prepare_l(); 514 515 if (err != OK) { 516 return err; 517 } 518 } 519 520 mFlags |= PLAYING; 521 mFlags |= FIRST_FRAME; 522 523 bool deferredAudioSeek = false; 524 525 if (mAudioSource != NULL) { 526 if (mAudioPlayer == NULL) { 527 if (mAudioSink != NULL) { 528 529 mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this); 530 mVeAudioPlayer = 531 (VideoEditorAudioPlayer*)mAudioPlayer; 532 533 mAudioPlayer->setSource(mAudioSource); 534 535 mVeAudioPlayer->setAudioMixSettings( 536 mPreviewPlayerAudioMixSettings); 537 538 mVeAudioPlayer->setAudioMixPCMFileHandle( 539 mAudioMixPCMFileHandle); 540 541 mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp( 542 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime, 543 mCurrentMediaVolumeValue); 544 545 mTimeSource = mVeAudioPlayer; //mAudioPlayer; 546 547 deferredAudioSeek = true; 548 mWatchForAudioSeekComplete = false; 549 mWatchForAudioEOS = true; 550 } 551 } 552 553 CHECK(!(mFlags & AUDIO_RUNNING)); 554 555 if (mVideoSource == NULL) { 556 status_t err = startAudioPlayer_l(); 557 558 if (err != OK) { 559 delete mAudioPlayer; 560 mAudioPlayer = NULL; 561 mFlags &= ~(PLAYING | FIRST_FRAME); 562 return err; 563 } 564 } 565 } 566 567 if (mTimeSource == NULL && mAudioPlayer == NULL) { 568 mTimeSource = &mSystemTimeSource; 569 } 570 571 // Set the seek option for Image source files and read. 
572 // This resets the timestamping for image play 573 if (mIsVideoSourceJpg) { 574 MediaSource::ReadOptions options; 575 MediaBuffer *aLocalBuffer; 576 options.setSeekTo(mSeekTimeUs); 577 mVideoSource->read(&aLocalBuffer, &options); 578 } 579 580 if (mVideoSource != NULL) { 581 // Kick off video playback 582 postVideoEvent_l(); 583 } 584 585 if (deferredAudioSeek) { 586 // If there was a seek request while we were paused 587 // and we're just starting up again, honor the request now. 588 seekAudioIfNecessary_l(); 589 } 590 591 if (mFlags & AT_EOS) { 592 // Legacy behaviour, if a stream finishes playing and then 593 // is started again, we play from the start... 594 seekTo_l(0); 595 } 596 597 return OK; 598} 599 600 601status_t PreviewPlayer::initRenderer_l() { 602 if (mSurface != NULL || mISurface != NULL) { 603 sp<MetaData> meta = mVideoSource->getFormat(); 604 605 int32_t format; 606 const char *component; 607 int32_t decodedWidth, decodedHeight; 608 CHECK(meta->findInt32(kKeyColorFormat, &format)); 609 CHECK(meta->findCString(kKeyDecoderComponent, &component)); 610 CHECK(meta->findInt32(kKeyWidth, &decodedWidth)); 611 CHECK(meta->findInt32(kKeyHeight, &decodedHeight)); 612 613 // Must ensure that mVideoRenderer's destructor is actually executed 614 // before creating a new one. 615 IPCThreadState::self()->flushCommands(); 616 617 // always use localrenderer since decoded buffers are modified 618 // by postprocessing module 619 // Other decoders are instantiated locally and as a consequence 620 // allocate their buffers in local address space. 
621 if(mVideoRenderer == NULL) { 622 623 mVideoRenderer = PreviewLocalRenderer:: initPreviewLocalRenderer ( 624 false, // previewOnly 625 (OMX_COLOR_FORMATTYPE)format, 626 mSurface, 627 mOutputVideoWidth, mOutputVideoHeight, 628 mOutputVideoWidth, mOutputVideoHeight); 629 630 if ( mVideoRenderer == NULL ) 631 { 632 return UNKNOWN_ERROR; 633 } 634 return OK; 635 } 636 } 637 return OK; 638} 639 640 641void PreviewPlayer::setISurface(const sp<ISurface> &isurface) { 642 Mutex::Autolock autoLock(mLock); 643 mISurface = isurface; 644} 645 646 647status_t PreviewPlayer::seekTo(int64_t timeUs) { 648 649 if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) { 650 Mutex::Autolock autoLock(mLock); 651 return seekTo_l(timeUs); 652 } 653 654 return OK; 655} 656 657 658status_t PreviewPlayer::getVideoDimensions( 659 int32_t *width, int32_t *height) const { 660 Mutex::Autolock autoLock(mLock); 661 662 if (mVideoWidth < 0 || mVideoHeight < 0) { 663 return UNKNOWN_ERROR; 664 } 665 666 *width = mVideoWidth; 667 *height = mVideoHeight; 668 669 return OK; 670} 671 672 673status_t PreviewPlayer::initAudioDecoder() { 674 sp<MetaData> meta = mAudioTrack->getFormat(); 675 const char *mime; 676 CHECK(meta->findCString(kKeyMIMEType, &mime)); 677 678 if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 679 mAudioSource = mAudioTrack; 680 } else { 681 sp<MediaSource> aRawSource; 682 aRawSource = OMXCodec::Create( 683 mClient.interface(), mAudioTrack->getFormat(), 684 false, // createEncoder 685 mAudioTrack); 686 687 if(aRawSource != NULL) { 688 LOGV("initAudioDecoder: new VideoEditorSRC"); 689 mAudioSource = new VideoEditorSRC(aRawSource); 690 } 691 } 692 693 if (mAudioSource != NULL) { 694 int64_t durationUs; 695 if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) { 696 Mutex::Autolock autoLock(mMiscStateLock); 697 if (mDurationUs < 0 || durationUs > mDurationUs) { 698 mDurationUs = durationUs; 699 } 700 } 701 status_t err = mAudioSource->start(); 702 703 if 
(err != OK) { 704 mAudioSource.clear(); 705 return err; 706 } 707 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) { 708 // For legacy reasons we're simply going to ignore the absence 709 // of an audio decoder for QCELP instead of aborting playback 710 // altogether. 711 return OK; 712 } 713 714 return mAudioSource != NULL ? OK : UNKNOWN_ERROR; 715} 716 717 718status_t PreviewPlayer::initVideoDecoder(uint32_t flags) { 719 720 mVideoSource = OMXCodec::Create( 721 mClient.interface(), mVideoTrack->getFormat(), 722 false, 723 mVideoTrack, 724 NULL, flags); 725 726 if (mVideoSource != NULL) { 727 int64_t durationUs; 728 if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) { 729 Mutex::Autolock autoLock(mMiscStateLock); 730 if (mDurationUs < 0 || durationUs > mDurationUs) { 731 mDurationUs = durationUs; 732 } 733 } 734 735 CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth)); 736 CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight)); 737 738 mReportedWidth = mVideoWidth; 739 mReportedHeight = mVideoHeight; 740 741 status_t err = mVideoSource->start(); 742 743 if (err != OK) { 744 mVideoSource.clear(); 745 return err; 746 } 747 } 748 749 return mVideoSource != NULL ? OK : UNKNOWN_ERROR; 750} 751 752 753void PreviewPlayer::onVideoEvent() { 754 uint32_t i=0; 755 bool bAppliedVideoEffect = false; 756 M4OSA_ERR err1 = M4NO_ERROR; 757 int64_t imageFrameTimeUs = 0; 758 759 Mutex::Autolock autoLock(mLock); 760 if (!mVideoEventPending) { 761 // The event has been cancelled in reset_l() but had already 762 // been scheduled for execution at that time. 
763 return; 764 } 765 mVideoEventPending = false; 766 767 if (mFlags & SEEK_PREVIEW) { 768 mFlags &= ~SEEK_PREVIEW; 769 return; 770 } 771 772 TimeSource *ts_st = &mSystemTimeSource; 773 int64_t timeStartUs = ts_st->getRealTimeUs(); 774 775 if (mSeeking) { 776 if (mLastVideoBuffer) { 777 mLastVideoBuffer->release(); 778 mLastVideoBuffer = NULL; 779 } 780 781 782 if(mAudioSource != NULL) { 783 784 // We're going to seek the video source first, followed by 785 // the audio source. 786 // In order to avoid jumps in the DataSource offset caused by 787 // the audio codec prefetching data from the old locations 788 // while the video codec is already reading data from the new 789 // locations, we'll "pause" the audio source, causing it to 790 // stop reading input data until a subsequent seek. 791 792 if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) { 793 mAudioPlayer->pause(); 794 mFlags &= ~AUDIO_RUNNING; 795 } 796 mAudioSource->pause(); 797 } 798 } 799 800 if (!mVideoBuffer) { 801 MediaSource::ReadOptions options; 802 if (mSeeking) { 803 LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs, 804 mSeekTimeUs / 1E6); 805 806 options.setSeekTo( 807 mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST); 808 } 809 for (;;) { 810 status_t err = mVideoSource->read(&mVideoBuffer, &options); 811 options.clearSeekTo(); 812 813 if (err != OK) { 814 CHECK_EQ(mVideoBuffer, NULL); 815 816 if (err == INFO_FORMAT_CHANGED) { 817 LOGV("LV PLAYER VideoSource signalled format change"); 818 notifyVideoSize_l(); 819 sp<MetaData> meta = mVideoSource->getFormat(); 820 821 CHECK(meta->findInt32(kKeyWidth, &mReportedWidth)); 822 CHECK(meta->findInt32(kKeyHeight, &mReportedHeight)); 823 if (mVideoRenderer != NULL) { 824 mVideoRendererIsPreview = false; 825 err = initRenderer_l(); 826 if ( err != OK ) 827 postStreamDoneEvent_l(err); // santosh 828 829 } 830 continue; 831 } 832 // So video playback is complete, but we may still have 833 // a seek request pending that needs to be applied 
to the audio track 834 if (mSeeking) { 835 LOGV("video stream ended while seeking!"); 836 } 837 finishSeekIfNecessary(-1); 838 LOGV("PreviewPlayer: onVideoEvent EOS reached."); 839 mFlags |= VIDEO_AT_EOS; 840 mOverlayUpdateEventPosted = false; 841 postStreamDoneEvent_l(err); 842 return; 843 } 844 845 if (mVideoBuffer->range_length() == 0) { 846 // Some decoders, notably the PV AVC software decoder 847 // return spurious empty buffers that we just want to ignore. 848 849 mVideoBuffer->release(); 850 mVideoBuffer = NULL; 851 continue; 852 } 853 854 int64_t videoTimeUs; 855 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs)); 856 857 if((videoTimeUs/1000) < mPlayBeginTimeMsec) { 858 // Frames are before begin cut time 859 // Donot render 860 mVideoBuffer->release(); 861 mVideoBuffer = NULL; 862 continue; 863 } 864 865 break; 866 } 867 } 868 869 mNumberDecVideoFrames++; 870 871 int64_t timeUs; 872 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)); 873 874 { 875 Mutex::Autolock autoLock(mMiscStateLock); 876 mVideoTimeUs = timeUs; 877 } 878 879 mDecodedVideoTs = timeUs; 880 881 if(!mStartNextPlayer) { 882 int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs; 883 if(playbackTimeRemaining <= 1500000) { 884 //When less than 1.5 sec of playback left 885 // send notification to start next player 886 887 mStartNextPlayer = true; 888 notifyListener_l(0xAAAAAAAA); 889 } 890 } 891 892 bool wasSeeking = mSeeking; 893 finishSeekIfNecessary(timeUs); 894 if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) { 895 status_t err = startAudioPlayer_l(); 896 if (err != OK) { 897 LOGE("Starting the audio player failed w/ err %d", err); 898 return; 899 } 900 } 901 902 TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? 
&mSystemTimeSource : mTimeSource; 903 904 if(ts == NULL) { 905 mVideoBuffer->release(); 906 mVideoBuffer = NULL; 907 return; 908 } 909 910 if(!mIsVideoSourceJpg) { 911 if (mFlags & FIRST_FRAME) { 912 mFlags &= ~FIRST_FRAME; 913 914 mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs; 915 } 916 917 int64_t realTimeUs, mediaTimeUs; 918 if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL 919 && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) { 920 mTimeSourceDeltaUs = realTimeUs - mediaTimeUs; 921 } 922 923 int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs; 924 925 int64_t latenessUs = nowUs - timeUs; 926 927 if (wasSeeking) { 928 // Let's display the first frame after seeking right away. 929 latenessUs = 0; 930 } 931 LOGV("Audio time stamp = %lld and video time stamp = %lld", 932 ts->getRealTimeUs(),timeUs); 933 if (latenessUs > 40000) { 934 // We're more than 40ms late. 935 936 LOGV("LV PLAYER we're late by %lld us (%.2f secs)", 937 latenessUs, latenessUs / 1E6); 938 939 mVideoBuffer->release(); 940 mVideoBuffer = NULL; 941 postVideoEvent_l(0); 942 return; 943 } 944 945 if (latenessUs < -25000) { 946 // We're more than 25ms early. 
947 LOGV("We're more than 25ms early, lateness %lld", latenessUs); 948 949 postVideoEvent_l(25000); 950 return; 951 } 952 } 953 954 if (mVideoRendererIsPreview || mVideoRenderer == NULL) { 955 mVideoRendererIsPreview = false; 956 957 status_t err = initRenderer_l(); 958 if ( err != OK ) 959 postStreamDoneEvent_l(err); // santosh 960 } 961 962 // If timestamp exceeds endCutTime of clip, donot render 963 if((timeUs/1000) > mPlayEndTimeMsec) { 964 if (mLastVideoBuffer) { 965 mLastVideoBuffer->release(); 966 mLastVideoBuffer = NULL; 967 } 968 mLastVideoBuffer = mVideoBuffer; 969 mVideoBuffer = NULL; 970 mFlags |= VIDEO_AT_EOS; 971 mFlags |= AUDIO_AT_EOS; 972 LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS.."); 973 mOverlayUpdateEventPosted = false; 974 postStreamDoneEvent_l(ERROR_END_OF_STREAM); 975 return; 976 } 977 978 // Post processing to apply video effects 979 for(i=0;i<mNumberEffects;i++) { 980 // First check if effect starttime matches the clip being previewed 981 if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) || 982 (mEffectsSettings[i].uiStartTime >= 983 ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec))) 984 { 985 // This effect doesn't belong to this clip, check next one 986 continue; 987 } 988 // Check if effect applies to this particular frame timestamp 989 if((mEffectsSettings[i].uiStartTime <= 990 (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) && 991 ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >= 992 (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) 993 && (mEffectsSettings[i].uiDuration != 0)) { 994 setVideoPostProcessingNode( 995 mEffectsSettings[i].VideoEffectType, TRUE); 996 } 997 else { 998 setVideoPostProcessingNode( 999 mEffectsSettings[i].VideoEffectType, FALSE); 1000 } 1001 } 1002 1003 //Provide the overlay Update indication when there is an overlay effect 1004 if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) { 1005 mCurrentVideoEffect &= 
~VIDEO_EFFECT_FRAMING; //never apply framing here. 1006 if (!mOverlayUpdateEventPosted) { 1007 // Find the effect in effectSettings array 1008 int index; 1009 for (index = 0; index < mNumberEffects; index++) { 1010 M4OSA_UInt32 timeMs = mDecodedVideoTs/1000; 1011 M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000; 1012 if(mEffectsSettings[index].VideoEffectType == 1013 M4xVSS_kVideoEffectType_Framing) { 1014 if (((mEffectsSettings[index].uiStartTime + 1) <= 1015 timeMs + timeOffset - mPlayBeginTimeMsec) && 1016 ((mEffectsSettings[index].uiStartTime - 1 + 1017 mEffectsSettings[index].uiDuration) >= 1018 timeMs + timeOffset - mPlayBeginTimeMsec)) 1019 { 1020 break; 1021 } 1022 } 1023 } 1024 if (index < mNumberEffects) { 1025 mCurrFramingEffectIndex = index; 1026 mOverlayUpdateEventPosted = true; 1027 postOverlayUpdateEvent_l(); 1028 LOGV("Framing index = %d", mCurrFramingEffectIndex); 1029 } else { 1030 LOGV("No framing effects found"); 1031 } 1032 } 1033 1034 } else if (mOverlayUpdateEventPosted) { 1035 //Post the event when the overlay is no more valid 1036 LOGV("Overlay is Done"); 1037 mOverlayUpdateEventPosted = false; 1038 postOverlayUpdateEvent_l(); 1039 } 1040 1041 1042 if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) { 1043 err1 = doVideoPostProcessing(); 1044 if(err1 != M4NO_ERROR) { 1045 LOGE("doVideoPostProcessing returned err"); 1046 bAppliedVideoEffect = false; 1047 } 1048 else { 1049 bAppliedVideoEffect = true; 1050 } 1051 } 1052 else { 1053 bAppliedVideoEffect = false; 1054 if(mRenderingMode != MEDIA_RENDERING_INVALID) { 1055 // No effects to be applied, but media rendering to be done 1056 err1 = doMediaRendering(); 1057 if(err1 != M4NO_ERROR) { 1058 LOGE("doMediaRendering returned err"); 1059 //Use original mVideoBuffer for rendering 1060 mVideoResizedOrCropped = false; 1061 } 1062 } 1063 } 1064 1065 if (mVideoRenderer != NULL) { 1066 LOGV("mVideoRenderer CALL render()"); 1067 mVideoRenderer->render(); 1068 } 1069 1070 if (mLastVideoBuffer) { 1071 
mLastVideoBuffer->release(); 1072 mLastVideoBuffer = NULL; 1073 } 1074 1075 mLastVideoBuffer = mVideoBuffer; 1076 mVideoBuffer = NULL; 1077 1078 // Post progress callback based on callback interval set 1079 if(mNumberDecVideoFrames >= mProgressCbInterval) { 1080 postProgressCallbackEvent_l(); 1081 mNumberDecVideoFrames = 0; // reset counter 1082 } 1083 1084 // if reached EndCutTime of clip, post EOS event 1085 if((timeUs/1000) >= mPlayEndTimeMsec) { 1086 LOGV("PreviewPlayer: onVideoEvent EOS."); 1087 mFlags |= VIDEO_AT_EOS; 1088 mFlags |= AUDIO_AT_EOS; 1089 mOverlayUpdateEventPosted = false; 1090 postStreamDoneEvent_l(ERROR_END_OF_STREAM); 1091 } 1092 else { 1093 if(!mIsVideoSourceJpg) { 1094 postVideoEvent_l(0); 1095 } 1096 else { 1097 postVideoEvent_l(33000); 1098 } 1099 } 1100} 1101 1102status_t PreviewPlayer::prepare() { 1103 Mutex::Autolock autoLock(mLock); 1104 return prepare_l(); 1105} 1106 1107status_t PreviewPlayer::prepare_l() { 1108 if (mFlags & PREPARED) { 1109 return OK; 1110 } 1111 1112 if (mFlags & PREPARING) { 1113 return UNKNOWN_ERROR; 1114 } 1115 1116 mIsAsyncPrepare = false; 1117 status_t err = prepareAsync_l(); 1118 1119 if (err != OK) { 1120 return err; 1121 } 1122 1123 while (mFlags & PREPARING) { 1124 mPreparedCondition.wait(mLock); 1125 } 1126 1127 return mPrepareResult; 1128} 1129 1130status_t PreviewPlayer::prepareAsync_l() { 1131 if (mFlags & PREPARING) { 1132 return UNKNOWN_ERROR; // async prepare already pending 1133 } 1134 1135 if (!mQueueStarted) { 1136 mQueue.start(); 1137 mQueueStarted = true; 1138 } 1139 1140 mFlags |= PREPARING; 1141 mAsyncPrepareEvent = new PreviewPlayerEvent( 1142 this, &PreviewPlayer::onPrepareAsyncEvent); 1143 1144 mQueue.postEvent(mAsyncPrepareEvent); 1145 1146 return OK; 1147} 1148 1149status_t PreviewPlayer::finishSetDataSource_l() { 1150 sp<DataSource> dataSource; 1151 sp<MediaExtractor> extractor; 1152 1153 dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders); 1154 1155 if (dataSource == 
NULL) { 1156 return UNKNOWN_ERROR; 1157 } 1158 1159 //If file type is .rgb, then no need to check for Extractor 1160 int uriLen = strlen(mUri); 1161 int startOffset = uriLen - 4; 1162 if(!strncasecmp(mUri+startOffset, ".rgb", 4)) { 1163 extractor = NULL; 1164 } 1165 else { 1166 extractor = MediaExtractor::Create(dataSource, 1167 MEDIA_MIMETYPE_CONTAINER_MPEG4); 1168 } 1169 1170 if (extractor == NULL) { 1171 LOGV("PreviewPlayer::finishSetDataSource_l extractor == NULL"); 1172 return setDataSource_l_jpg(); 1173 } 1174 1175 return setDataSource_l(extractor); 1176} 1177 1178 1179// static 1180bool PreviewPlayer::ContinuePreparation(void *cookie) { 1181 PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie); 1182 1183 return (me->mFlags & PREPARE_CANCELLED) == 0; 1184} 1185 1186void PreviewPlayer::onPrepareAsyncEvent() { 1187 Mutex::Autolock autoLock(mLock); 1188 LOGV("onPrepareAsyncEvent"); 1189 1190 if (mFlags & PREPARE_CANCELLED) { 1191 LOGV("LV PLAYER prepare was cancelled before doing anything"); 1192 abortPrepare(UNKNOWN_ERROR); 1193 return; 1194 } 1195 1196 if (mUri.size() > 0) { 1197 status_t err = finishSetDataSource_l(); 1198 1199 if (err != OK) { 1200 abortPrepare(err); 1201 return; 1202 } 1203 } 1204 1205 if (mVideoTrack != NULL && mVideoSource == NULL) { 1206 status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly); 1207 1208 if (err != OK) { 1209 abortPrepare(err); 1210 return; 1211 } 1212 } 1213 1214 if (mAudioTrack != NULL && mAudioSource == NULL) { 1215 status_t err = initAudioDecoder(); 1216 1217 if (err != OK) { 1218 abortPrepare(err); 1219 return; 1220 } 1221 } 1222 finishAsyncPrepare_l(); 1223 1224} 1225 1226void PreviewPlayer::finishAsyncPrepare_l() { 1227 if (mIsAsyncPrepare) { 1228 if (mVideoSource == NULL) { 1229 LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 "); 1230 notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0); 1231 } else { 1232 LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE"); 1233 notifyVideoSize_l(); 1234 } 1235 
        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    // Clear the in-progress/cancel bits, mark prepared, and wake any thread
    // blocked in prepare_l().
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

// Captures everything needed to restore playback later (uri, headers, file
// source, playback flags, position, and a copy of the last decoded frame)
// into a SuspensionState, then resets the player.
status_t PreviewPlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            //go into here if video is suspended again
            //after resuming without being played between
            //them
            // Keep the existing snapshot; just reset the player around it.
            SuspensionState *state = mSuspensionState;
            mSuspensionState = NULL;
            reset_l();
            mSuspensionState = state;
            return OK;
        }

        delete mSuspensionState;
        mSuspensionState = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    // Wait for an in-flight prepare to observe the cancellation and finish.
    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    SuspensionState *state = new SuspensionState;
    state->mUri = mUri;
    state->mUriHeaders = mUriHeaders;
    state->mFileSource = mFileSource;

    // Only the playback-related bits survive suspension.
    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
    getPosition(&state->mPositionUs);

    if (mLastVideoBuffer) {
        size_t size = mLastVideoBuffer->range_length();
        if (size) {
            int32_t unreadable;
            // Snapshot the last decoded frame so resume() can redraw it --
            // but only when the decoder lets us read the buffer contents.
            if (!mLastVideoBuffer->meta_data()->findInt32(
                        kKeyIsUnreadable, &unreadable)
                    || unreadable == 0) {
                state->mLastVideoFrameSize = size;
                state->mLastVideoFrame = malloc(size);
                memcpy(state->mLastVideoFrame,
                       (const uint8_t *)mLastVideoBuffer->data()
                            + mLastVideoBuffer->range_offset(),
                       size);

                state->mVideoWidth = mVideoWidth;
                state->mVideoHeight = mVideoHeight;

                sp<MetaData> meta = mVideoSource->getFormat();
                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
                CHECK(meta->findInt32(kKeyHeight,
&state->mDecodedHeight)); 1303 } else { 1304 LOGV("Unable to save last video frame, we have no access to " 1305 "the decoded video data."); 1306 } 1307 } 1308 } 1309 1310 reset_l(); 1311 1312 mSuspensionState = state; 1313 1314 return OK; 1315} 1316 1317status_t PreviewPlayer::resume() { 1318 LOGV("resume"); 1319 Mutex::Autolock autoLock(mLock); 1320 1321 if (mSuspensionState == NULL) { 1322 return INVALID_OPERATION; 1323 } 1324 1325 SuspensionState *state = mSuspensionState; 1326 mSuspensionState = NULL; 1327 1328 status_t err; 1329 if (state->mFileSource != NULL) { 1330 err = AwesomePlayer::setDataSource_l(state->mFileSource); 1331 1332 if (err == OK) { 1333 mFileSource = state->mFileSource; 1334 } 1335 } else { 1336 err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders); 1337 } 1338 1339 if (err != OK) { 1340 delete state; 1341 state = NULL; 1342 1343 return err; 1344 } 1345 1346 seekTo_l(state->mPositionUs); 1347 1348 mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS); 1349 1350 if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) { 1351 mVideoRenderer = 1352 PreviewLocalRenderer::initPreviewLocalRenderer( 1353 true, // previewOnly 1354 (OMX_COLOR_FORMATTYPE)state->mColorFormat, 1355 mSurface, 1356 state->mVideoWidth, 1357 state->mVideoHeight, 1358 state->mDecodedWidth, 1359 state->mDecodedHeight); 1360 1361 mVideoRendererIsPreview = true; 1362 1363 ((PreviewLocalRenderer *)mVideoRenderer.get())->render( 1364 state->mLastVideoFrame, state->mLastVideoFrameSize); 1365 } 1366 1367 if (state->mFlags & PLAYING) { 1368 play_l(); 1369 } 1370 1371 mSuspensionState = state; 1372 state = NULL; 1373 1374 return OK; 1375} 1376 1377 1378status_t PreviewPlayer::loadEffectsSettings( 1379 M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) { 1380 M4OSA_UInt32 i = 0, rgbSize = 0; 1381 M4VIFI_UInt8 *tmp = M4OSA_NULL; 1382 1383 mNumberEffects = nEffects; 1384 mEffectsSettings = pEffectSettings; 1385 return OK; 1386} 1387 1388status_t 
PreviewPlayer::loadAudioMixSettings(
        M4xVSS_AudioMixingSettings* pAudioMixSettings) {

    // Stores the caller-owned audio-mix settings for the preview session.
    LOGV("PreviewPlayer: loadAudioMixSettings: ");
    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
    return OK;
}

// Records the handle of the PCM file that carries the background audio
// track to be mixed in.
status_t PreviewPlayer::setAudioMixPCMFileHandle(
        M4OSA_Context pAudioMixPCMFileHandle) {

    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
    return OK;
}

// Records the storyboard timestamp, the clip's begin-cut time and the
// primary-track volume used when mixing audio.
status_t PreviewPlayer::setAudioMixStoryBoardParam(
        M4OSA_UInt32 audioMixStoryBoardTS,
        M4OSA_UInt32 currentMediaBeginCutTime,
        M4OSA_UInt32 primaryTrackVolValue ) {

    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
    mCurrentMediaVolumeValue = primaryTrackVolValue;
    return OK;
}

// Sets the clip's begin-cut time in milliseconds; frames before this are
// skipped during playback.
status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {

    mPlayBeginTimeMsec = msec;
    return OK;
}

// Sets the clip's end-cut time in milliseconds; reaching it triggers EOS.
status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {

    mPlayEndTimeMsec = msec;
    return OK;
}

// Sets the clip's start offset on the storyboard timeline; also caches the
// same value in microseconds for timestamp arithmetic.
status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {

    mStoryboardStartTimeMsec = msec;
    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

// Sets how many decoded video frames elapse between progress callbacks.
status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

    mProgressCbInterval = cbInterval;
    return OK;
}


// Records the rendering mode (resize/crop/black-borders) and translates the
// symbolic output size into pixel dimensions. Returns BAD_VALUE for an
// unknown size constant.
status_t PreviewPlayer::setMediaRenderingMode(
        M4xVSS_MediaRendering mode,
        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

    mRenderingMode = mode;

    /* reset boolean for each clip*/
    mVideoResizedOrCropped = false;

    switch(outputVideoSize) {
        case M4VIDEOEDITING_kSQCIF:
            mOutputVideoWidth = 128;
            mOutputVideoHeight = 96;
            break;

        case M4VIDEOEDITING_kQQVGA:
            mOutputVideoWidth = 160;
            mOutputVideoHeight = 120;
            break;

        case
             M4VIDEOEDITING_kQCIF:
            mOutputVideoWidth = 176;
            mOutputVideoHeight = 144;
            break;

        case M4VIDEOEDITING_kQVGA:
            mOutputVideoWidth = 320;
            mOutputVideoHeight = 240;
            break;

        case M4VIDEOEDITING_kCIF:
            mOutputVideoWidth = 352;
            mOutputVideoHeight = 288;
            break;

        case M4VIDEOEDITING_kVGA:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kWVGA:
            mOutputVideoWidth = 800;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kNTSC:
            mOutputVideoWidth = 720;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_k640_360:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 360;
            break;

        case M4VIDEOEDITING_k854_480:
            mOutputVideoWidth = 854;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kHD1280:
            mOutputVideoWidth = 1280;
            mOutputVideoHeight = 720;
            break;

        // NOTE(review): 1080x720 looks unusual for a "kHD1080" constant but
        // matches the other kHD* entries' 720-line height -- confirm against
        // the M4VIDEOEDITING_VideoFrameSize definition before changing.
        case M4VIDEOEDITING_kHD1080:
            mOutputVideoWidth = 1080;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD960:
            mOutputVideoWidth = 960;
            mOutputVideoHeight = 720;
            break;

        default:
            LOGE("unsupported output video size set");
            return BAD_VALUE;
    }

    return OK;
}

// Converts the current decoded frame (YUV420 planar) into the renderer's
// YV12 output buffer, applying the configured resize/crop rendering mode.
M4OSA_ERR PreviewPlayer::doMediaRendering() {
    M4OSA_ERR err = M4NO_ERROR;
    M4VIFI_ImagePlane planeIn[3], planeOut[3];
    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
    M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
    size_t videoBufferSize = 0;
    M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0;
    int32_t colorFormat = 0;

    if(!mIsVideoSourceJpg) {
        // Real clip: ask the decoder for its output color format.
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    }
    else {
        // JPEG stills are always decoded to planar YUV420.
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    videoBufferSize =
        mVideoBuffer->size();
    // YUV420: one luma plane plus two quarter-size chroma planes = 3/2 * W*H.
    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;

    uint8_t* outBuffer;
    size_t outBufferStride = 0;

    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);

    // index is always 0 here, so bufferOffset is 0; kept for parity with
    // the multi-frame variants of this code.
    bufferOffset = index*frameSize;
    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
            mVideoBuffer->range_offset()+bufferOffset;


    /* In plane*/
    prepareYUV420ImagePlane(planeIn, mVideoWidth,
            mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);

    // Set the output YUV420 plane to be compatible with YV12 format
    // W & H even
    // YVU instead of YUV
    // align buffers on 32 bits

    //In YV12 format, sizes must be even
    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;

    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
            (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);


    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);

    if(err != M4NO_ERROR)
    {
        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
        return err;
    }
    mVideoResizedOrCropped = true;

    return err;
}

// Re-arms the storyboard timestamp offset (in microseconds) so progress
// callbacks restart from the clip's storyboard start time.
status_t PreviewPlayer::resetJniCallbackTimeStamp() {

    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

// Caller must hold mLock. Queues a progress-callback event unless one is
// already pending.
void PreviewPlayer::postProgressCallbackEvent_l() {
    if (mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = true;

    mQueue.postEvent(mProgressCbEvent);
}


// Event handler: reports playback progress (in ms on the storyboard
// timeline) to the listener via MEDIA_INFO.
void PreviewPlayer::onProgressCbEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = false;
    // If playback starts from previous I-frame,
    // then send frame storyboard duration
    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
        notifyListener_l(MEDIA_INFO, 0,
                mDecVideoTsStoryBoard/1000);
    }
    else {
        // Progress = storyboard position of the decoded frame minus the
        // clip's begin-cut offset, in milliseconds.
        notifyListener_l(MEDIA_INFO, 0,
                (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
    }
}

// Caller must hold mLock. Queues an overlay-update event unless one is
// already pending.
void PreviewPlayer::postOverlayUpdateEvent_l() {
    if (mOverlayUpdateEventPending) {
        return;
    }
    mOverlayUpdateEventPending = true;
    mQueue.postEvent(mOverlayUpdateEvent);
}

// Event handler: tells the listener whether the framing overlay was just
// shown (1) or hidden (0), along with the current framing-effect index.
// 0xBBBBBBBB is the private message code the listener matches on.
void PreviewPlayer::onUpdateOverlayEvent() {
    Mutex::Autolock autoLock(mLock);

    if (!mOverlayUpdateEventPending) {
        return;
    }
    mOverlayUpdateEventPending = false;

    int updateState;
    if (mOverlayUpdateEventPosted) {
        updateState = 1;
    } else {
        updateState = 0;
    }
    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
}


// Enables or disables one video effect in the mCurrentVideoEffect bitmask,
// translating the VSS effect type into the player's local effect enum.
void PreviewPlayer::setVideoPostProcessingNode(
        M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {

    uint32_t effect = VIDEO_EFFECT_NONE;

    //Map M4VSS3GPP_VideoEffectType to local enum
    switch(type) {
        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
            effect = VIDEO_EFFECT_FADEFROMBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
            effect = VIDEO_EFFECT_FADETOBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
            effect = VIDEO_EFFECT_CURTAINOPEN;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
            effect = VIDEO_EFFECT_CURTAINCLOSE;
            break;

        case M4xVSS_kVideoEffectType_BlackAndWhite:
            effect = VIDEO_EFFECT_BLACKANDWHITE;
            break;

        case M4xVSS_kVideoEffectType_Pink:
            effect = VIDEO_EFFECT_PINK;
            break;

        case M4xVSS_kVideoEffectType_Green:
            effect = VIDEO_EFFECT_GREEN;
            break;

        case M4xVSS_kVideoEffectType_Sepia:
            effect = VIDEO_EFFECT_SEPIA;
            break;

        case M4xVSS_kVideoEffectType_Negative:
            effect = VIDEO_EFFECT_NEGATIVE;
            break;

        case M4xVSS_kVideoEffectType_Framing:

            effect = VIDEO_EFFECT_FRAMING;
            break;

        case M4xVSS_kVideoEffectType_Fifties:
            effect = VIDEO_EFFECT_FIFTIES;
            break;

        case M4xVSS_kVideoEffectType_ColorRGB16:
            effect = VIDEO_EFFECT_COLOR_RGB16;
            break;

        case M4xVSS_kVideoEffectType_Gradient:
            effect = VIDEO_EFFECT_GRADIENT;
            break;

        default:
            effect = VIDEO_EFFECT_NONE;
            break;
    }

    if(enable == M4OSA_TRUE) {
        //If already set, then no need to set again
        if(!(mCurrentVideoEffect & effect)) {
            mCurrentVideoEffect |= effect;
            if(effect == VIDEO_EFFECT_FIFTIES) {
                // The fifties effect needs a per-clip restart marker.
                mIsFiftiesEffectStarted = true;
            }
        }
    }
    else {
        //Reset only if already set
        if(mCurrentVideoEffect & effect) {
            mCurrentVideoEffect &= ~effect;
        }
    }
}

// Records the dimensions of a still-image (JPEG) clip.
status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
    mVideoWidth = width;
    mVideoHeight = height;
    return OK;
}


// Applies the active video effects and the rendering mode to the current
// decoded frame, writing the result into the renderer's output buffer.
// Only planar YUV420 input is supported.
M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
    M4OSA_ERR err = M4NO_ERROR;
    vePostProcessParams postProcessParams;
    int32_t colorFormat = 0;


    if(!mIsVideoSourceJpg) {
        // Real clip: ask the decoder for its output color format.
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    }
    else {
        // JPEG stills are always decoded to planar YUV420.
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    // 0x7FA30C00 is a vendor-specific (QOMX) semi-planar YUV420 format;
    // the effects pipeline cannot consume semi-planar data.
    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
       (colorFormat == 0x7FA30C00)) {
        LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
    }

    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
            + mVideoBuffer->range_offset();

    postProcessParams.videoWidth = mVideoWidth;
    postProcessParams.videoHeight = mVideoHeight;
    postProcessParams.timeMs = mDecodedVideoTs/1000;
    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
    postProcessParams.effectsSettings =
            mEffectsSettings;
    postProcessParams.numberEffects = mNumberEffects;
    postProcessParams.outVideoWidth = mOutputVideoWidth;
    postProcessParams.outVideoHeight = mOutputVideoHeight;
    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
    postProcessParams.renderingMode = mRenderingMode;
    // The fifties "started" marker is consumed exactly once per clip.
    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
        mIsFiftiesEffectStarted = M4OSA_FALSE;
    }
    else {
        postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
    }

    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
    err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);

    return err;
}

// Decodes forward until the first displayable frame at/after the clip's
// begin-cut time is available in mVideoBuffer, handling a pre-set seek,
// decoder format changes, and end-of-stream along the way.
status_t PreviewPlayer::readFirstVideoFrame() {
    LOGV("PreviewPlayer::readFirstVideoFrame");

    if (mFlags & SEEK_PREVIEW) {
        mFlags &= ~SEEK_PREVIEW;
        return OK;
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                 mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            // Seek only applies to the first read after it is set.
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));

                    // The renderer was sized for the old format; rebuild it.
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        err = initRenderer_l();
                        if ( err != OK )

                            // Renderer re-init failed: surface the error as
                            // a stream-done event rather than continuing.
                            postStreamDoneEvent_l(err);
                    }
                    continue;
                }
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return OK;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // buffers are before begin cut time
                // ignore them
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        // mVideoTimeUs is read from other threads; publish it under the
        // misc-state lock.
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    return OK;

}

}  // namespace android