PreviewPlayer.cpp revision a5a42c5ceb98942909d84b20d44a920eed85a8cf
1/*
2 * Copyright (C) 2011 NXP Software
3 * Copyright (C) 2011 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18
19#define LOG_NDEBUG 1
20#define LOG_TAG "PreviewPlayer"
21#include <utils/Log.h>
22
23#include <dlfcn.h>
24
25#include "include/ARTSPController.h"
26#include "PreviewPlayer.h"
27#include "DummyAudioSource.h"
28#include "DummyVideoSource.h"
29#include "VideoEditorSRC.h"
30#include "include/NuCachedSource2.h"
31#include "include/ThrottledSource.h"
32
33
34#include "PreviewRenderer.h"
35
36#include <binder/IPCThreadState.h>
37#include <media/stagefright/DataSource.h>
38#include <media/stagefright/FileSource.h>
39#include <media/stagefright/MediaBuffer.h>
40#include <media/stagefright/MediaDefs.h>
41#include <media/stagefright/MediaExtractor.h>
42#include <media/stagefright/MediaDebug.h>
43#include <media/stagefright/MediaSource.h>
44#include <media/stagefright/MetaData.h>
45#include <media/stagefright/OMXCodec.h>
46
47#include <surfaceflinger/Surface.h>
48#include <media/stagefright/foundation/ALooper.h>
49
50namespace android {
51
52
53struct PreviewPlayerEvent : public TimedEventQueue::Event {
54    PreviewPlayerEvent(
55            PreviewPlayer *player,
56            void (PreviewPlayer::*method)())
57        : mPlayer(player),
58          mMethod(method) {
59    }
60
61protected:
62    virtual ~PreviewPlayerEvent() {}
63
64    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
65        (mPlayer->*mMethod)();
66    }
67
68private:
69    PreviewPlayer *mPlayer;
70    void (PreviewPlayer::*mMethod)();
71
72    PreviewPlayerEvent(const PreviewPlayerEvent &);
73    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
74};
75
76
77struct PreviewLocalRenderer : public PreviewPlayerRenderer {
78
79    static PreviewLocalRenderer* initPreviewLocalRenderer (
80            bool previewOnly,
81            OMX_COLOR_FORMATTYPE colorFormat,
82            const sp<Surface> &surface,
83            size_t displayWidth, size_t displayHeight,
84            size_t decodedWidth, size_t decodedHeight,
85            int32_t rotationDegrees = 0)
86    {
87        PreviewLocalRenderer* localRenderer = new
88            PreviewLocalRenderer(
89                previewOnly,
90                colorFormat,
91                surface,
92                displayWidth, displayHeight,
93                decodedWidth, decodedHeight,
94                rotationDegrees);
95
96        if ( localRenderer->init(previewOnly,
97                 colorFormat, surface,
98                 displayWidth, displayHeight,
99                 decodedWidth, decodedHeight,
100                 rotationDegrees) != OK )
101        {
102            delete localRenderer;
103            return NULL;
104        }
105        return localRenderer;
106    }
107
108    virtual void render(MediaBuffer *buffer) {
109        render((const uint8_t *)buffer->data() + buffer->range_offset(),
110               buffer->range_length());
111    }
112
113    void render(const void *data, size_t size) {
114        mTarget->render(data, size, NULL);
115    }
116    void render() {
117        mTarget->renderYV12();
118    }
119    void getBuffer(uint8_t **data, size_t *stride) {
120        mTarget->getBufferYV12(data, stride);
121    }
122
123protected:
124    virtual ~PreviewLocalRenderer() {
125        delete mTarget;
126        mTarget = NULL;
127    }
128
129private:
130    PreviewRenderer *mTarget;
131
132    PreviewLocalRenderer(
133            bool previewOnly,
134            OMX_COLOR_FORMATTYPE colorFormat,
135            const sp<Surface> &surface,
136            size_t displayWidth, size_t displayHeight,
137            size_t decodedWidth, size_t decodedHeight,
138            int32_t rotationDegrees = 0)
139        : mTarget(NULL) {
140    }
141
142
143    int init(
144            bool previewOnly,
145            OMX_COLOR_FORMATTYPE colorFormat,
146            const sp<Surface> &surface,
147            size_t displayWidth, size_t displayHeight,
148            size_t decodedWidth, size_t decodedHeight,
149            int32_t rotationDegrees = 0);
150
151    PreviewLocalRenderer(const PreviewLocalRenderer &);
152    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
153};
154
155int PreviewLocalRenderer::init(
156        bool previewOnly,
157        OMX_COLOR_FORMATTYPE colorFormat,
158        const sp<Surface> &surface,
159        size_t displayWidth, size_t displayHeight,
160        size_t decodedWidth, size_t decodedHeight,
161        int32_t rotationDegrees) {
162
163    mTarget = PreviewRenderer::CreatePreviewRenderer (
164            colorFormat, surface, displayWidth, displayHeight,
165            decodedWidth, decodedHeight, rotationDegrees);
166    if (mTarget == M4OSA_NULL) {
167        return UNKNOWN_ERROR;
168    }
169    return OK;
170}
171
172PreviewPlayer::PreviewPlayer()
173    : AwesomePlayer(),
174      mFrameRGBBuffer(NULL),
175      mFrameYUVBuffer(NULL),
176      mReportedWidth(0),
177      mReportedHeight(0),
178      mCurrFramingEffectIndex(0) {
179
180    mVideoRenderer = NULL;
181    mLastVideoBuffer = NULL;
182    mSuspensionState = NULL;
183    mEffectsSettings = NULL;
184    mVeAudioPlayer = NULL;
185    mAudioMixStoryBoardTS = 0;
186    mCurrentMediaBeginCutTime = 0;
187    mCurrentMediaVolumeValue = 0;
188    mNumberEffects = 0;
189    mDecodedVideoTs = 0;
190    mDecVideoTsStoryBoard = 0;
191    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
192    mProgressCbInterval = 0;
193    mNumberDecVideoFrames = 0;
194    mOverlayUpdateEventPosted = false;
195
196    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
197    mVideoEventPending = false;
198    mStreamDoneEvent = new PreviewPlayerEvent(this,
199         &AwesomePlayer::onStreamDone);
200
201    mStreamDoneEventPending = false;
202
203    mCheckAudioStatusEvent = new PreviewPlayerEvent(
204        this, &AwesomePlayer::onCheckAudioStatus);
205
206    mAudioStatusEventPending = false;
207
208    mProgressCbEvent = new PreviewPlayerEvent(this,
209         &PreviewPlayer::onProgressCbEvent);
210
211    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
212        &PreviewPlayer::onUpdateOverlayEvent);
213    mProgressCbEventPending = false;
214
215    mOverlayUpdateEventPending = false;
216    mResizedVideoBuffer = NULL;
217    mVideoResizedOrCropped = false;
218    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
219    mIsFiftiesEffectStarted = false;
220    reset();
221}
222
223PreviewPlayer::~PreviewPlayer() {
224
225    if (mQueueStarted) {
226        mQueue.stop();
227    }
228
229    reset();
230
231    if(mResizedVideoBuffer != NULL) {
232        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
233        mResizedVideoBuffer = NULL;
234    }
235
236    mVideoRenderer.clear();
237    mVideoRenderer = NULL;
238}
239
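// Cancels any pending video, stream-done, audio-status and progress-callback
// events. The keepBufferingGoing argument is not used here; it is presumably
// kept for symmetry with AwesomePlayer::cancelPlayerEvents().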
240void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
241    mQueue.cancelEvent(mVideoEvent->eventID());
242    mVideoEventPending = false;
243    mQueue.cancelEvent(mStreamDoneEvent->eventID());
244    mStreamDoneEventPending = false;
245    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
246    mAudioStatusEventPending = false;
247
248    mQueue.cancelEvent(mProgressCbEvent->eventID());
249    mProgressCbEventPending = false;
250}
251
252status_t PreviewPlayer::setDataSource(
253        const char *uri, const KeyedVector<String8, String8> *headers) {
254    Mutex::Autolock autoLock(mLock);
255    return setDataSource_l(uri, headers);
256}
257
258status_t PreviewPlayer::setDataSource_l(
259        const char *uri, const KeyedVector<String8, String8> *headers) {
260    reset_l();
261
262    mUri = uri;
263
264    if (headers) {
265        mUriHeaders = *headers;
266    }
267
268    // The actual work will be done during preparation in the call to
269    // ::finishSetDataSource_l to avoid blocking the calling thread in
270    // setDataSource for any significant time.
271    return OK;
272}
273
274status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
275    bool haveAudio = false;
276    bool haveVideo = false;
277    for (size_t i = 0; i < extractor->countTracks(); ++i) {
278        sp<MetaData> meta = extractor->getTrackMetaData(i);
279
280        const char *mime;
281        CHECK(meta->findCString(kKeyMIMEType, &mime));
282
283        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
284            setVideoSource(extractor->getTrack(i));
285            haveVideo = true;
286        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
287            setAudioSource(extractor->getTrack(i));
288            haveAudio = true;
289
290            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
291                // Only do this for vorbis audio, none of the other audio
292                // formats even support this ringtone specific hack and
293                // retrieving the metadata on some extractors may turn out
294                // to be very expensive.
295                sp<MetaData> fileMeta = extractor->getMetaData();
296                int32_t loop;
297                if (fileMeta != NULL
298                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
299                         && loop != 0) {
300                    mFlags |= AUTO_LOOPING;
301                }
302            }
303        }
304
305        if (haveAudio && haveVideo) {
306            break;
307        }
308    }
309
310    /* Add support for a dummy audio track if the clip has none */
311    if( !haveAudio ){
312        LOGV("PreviewPlayer: setDataSource_l Dummyaudiosource creation started");
313
314        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
315                                              ((mPlayEndTimeMsec)*1000));
316        LOGV("PreviewPlayer: setDataSource_l Dummyaudiosource created");
317        if(mAudioTrack != NULL) {
318            haveAudio = true;
319        }
320    }
321
322    if (!haveAudio && !haveVideo) {
323        return UNKNOWN_ERROR;
324    }
325
326    mExtractorFlags = extractor->flags();
327    return OK;
328}
329
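// Sets up playback for a still-image (JPEG) clip: a DummyAudioSource drives
// the clock for the clip duration and a DummyVideoSource supplies frames
// generated from the image file referenced by mUri.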
330status_t PreviewPlayer::setDataSource_l_jpg() {
332    LOGV("PreviewPlayer: setDataSource_l_jpg started");
333
334    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
335                                          ((mPlayEndTimeMsec)*1000));
336    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
337    if(mAudioSource != NULL) {
338        setAudioSource(mAudioSource);
339    }
340    status_t error = mAudioSource->start();
341    if (error != OK) {
342        LOGE("Error starting dummy audio source");
343        mAudioSource.clear();
344        return error;
345    }
346
347    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;
348
349    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
350                                            mDurationUs, mUri);
351    mReportedWidth = mVideoWidth;
352    mReportedHeight = mVideoHeight;
353
354    setVideoSource(mVideoSource);
355    status_t err1 = mVideoSource->start();
356    if (err1 != OK) {
357        mVideoSource.clear();
358        return err1;
359    }
360
361    mIsVideoSourceJpg = true;
362    return OK;
363}
364
365void PreviewPlayer::reset() {
366    Mutex::Autolock autoLock(mLock);
367    reset_l();
368}
369
370void PreviewPlayer::reset_l() {
371
372    if (mFlags & PREPARING) {
373        mFlags |= PREPARE_CANCELLED;
374    }
375
376    while (mFlags & PREPARING) {
377        mPreparedCondition.wait(mLock);
378    }
379
380    cancelPlayerEvents();
381    mAudioTrack.clear();
382    mVideoTrack.clear();
383
384    // Shut down audio first, so that the response to the reset request
385    // appears to happen instantaneously as far as the user is concerned.
386    // If we did this later, audio would continue playing while we shut
387    // down the video-related resources and the player would appear less
388    // responsive to a reset request.
389    if (mAudioPlayer == NULL && mAudioSource != NULL) {
390        // If we had an audio player, it would have effectively
391        // taken possession of the audio source and stopped it when
392        // _it_ is stopped. Otherwise this is still our responsibility.
393        mAudioSource->stop();
394    }
395    mAudioSource.clear();
396
397    mTimeSource = NULL;
398
399    delete mAudioPlayer;
400    mAudioPlayer = NULL;
401
402    if (mLastVideoBuffer) {
403        mLastVideoBuffer->release();
404        mLastVideoBuffer = NULL;
405    }
406
407    if (mVideoBuffer) {
408        mVideoBuffer->release();
409        mVideoBuffer = NULL;
410    }
411
412    if (mVideoSource != NULL) {
413        mVideoSource->stop();
414
415        // The following hack is necessary to ensure that the OMX
416        // component is completely released by the time we may try
417        // to instantiate it again.
418        wp<MediaSource> tmp = mVideoSource;
419        mVideoSource.clear();
420        while (tmp.promote() != NULL) {
421            usleep(1000);
422        }
423        IPCThreadState::self()->flushCommands();
424    }
425
426    mDurationUs = -1;
427    mFlags = 0;
428    mExtractorFlags = 0;
429    mVideoWidth = mVideoHeight = -1;
430    mTimeSourceDeltaUs = 0;
431    mVideoTimeUs = 0;
432
433    mSeeking = false;
434    mSeekNotificationSent = false;
435    mSeekTimeUs = 0;
436
437    mUri.setTo("");
438    mUriHeaders.clear();
439
440    mFileSource.clear();
441
442    delete mSuspensionState;
443    mSuspensionState = NULL;
444
445    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
446    mIsVideoSourceJpg = false;
447    mFrameRGBBuffer = NULL;
448    if(mFrameYUVBuffer != NULL) {
449        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
450        mFrameYUVBuffer = NULL;
451    }
452}
453
454status_t PreviewPlayer::play() {
455    Mutex::Autolock autoLock(mLock);
456
457    mFlags &= ~CACHE_UNDERRUN;
458
459    return play_l();
460}
461
462status_t PreviewPlayer::startAudioPlayer_l() {
463    CHECK(!(mFlags & AUDIO_RUNNING));
464
465    if (mAudioSource == NULL || mAudioPlayer == NULL) {
466        return OK;
467    }
468
469    if (!(mFlags & AUDIOPLAYER_STARTED)) {
470        mFlags |= AUDIOPLAYER_STARTED;
471
472        // We've already started the MediaSource in order to enable
473        // the prefetcher to read its data.
474        status_t err = mVeAudioPlayer->start(
475                true /* sourceAlreadyStarted */);
476
477        if (err != OK) {
478            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
479            return err;
480        }
481    } else {
482        mVeAudioPlayer->resume();
483    }
484
485    mFlags |= AUDIO_RUNNING;
486
487    mWatchForAudioEOS = true;
488
489    return OK;
490}
491
492status_t PreviewPlayer::play_l() {
493
494    if (mFlags & PLAYING) {
495        return OK;
496    }
497    mStartNextPlayer = false;
498
499    if (!(mFlags & PREPARED)) {
500        status_t err = prepare_l();
501
502        if (err != OK) {
503            return err;
504        }
505    }
506
507    mFlags |= PLAYING;
508    mFlags |= FIRST_FRAME;
509
510    bool deferredAudioSeek = false;
511
512    if (mAudioSource != NULL) {
513        if (mAudioPlayer == NULL) {
514            if (mAudioSink != NULL) {
515
516                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
517                mVeAudioPlayer =
518                          (VideoEditorAudioPlayer*)mAudioPlayer;
519
520                mAudioPlayer->setSource(mAudioSource);
521
522                mVeAudioPlayer->setAudioMixSettings(
523                 mPreviewPlayerAudioMixSettings);
524
525                mVeAudioPlayer->setAudioMixPCMFileHandle(
526                 mAudioMixPCMFileHandle);
527
528                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
529                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
530                 mCurrentMediaVolumeValue);
531
532                mTimeSource = mVeAudioPlayer;
533
534                deferredAudioSeek = true;
535                mWatchForAudioSeekComplete = false;
536                mWatchForAudioEOS = true;
537            }
538         }
539
540        CHECK(!(mFlags & AUDIO_RUNNING));
541
542        if (mVideoSource == NULL) {
543            status_t err = startAudioPlayer_l();
544
545            if (err != OK) {
546                delete mAudioPlayer;
547                mAudioPlayer = NULL;
548                mFlags &= ~(PLAYING | FIRST_FRAME);
549                return err;
550            }
551        }
552    }
553
554    if (mTimeSource == NULL && mAudioPlayer == NULL) {
555        mTimeSource = &mSystemTimeSource;
556    }
557
558    // Set the seek option for Image source files and read.
559    // This resets the timestamping for image play
560    if (mIsVideoSourceJpg) {
561        MediaSource::ReadOptions options;
562        MediaBuffer *aLocalBuffer;
563        options.setSeekTo(mSeekTimeUs);
564        mVideoSource->read(&aLocalBuffer, &options);
565        aLocalBuffer->release();
566    }
567
568    if (mVideoSource != NULL) {
569        // Kick off video playback
570        postVideoEvent_l();
571    }
572
573    if (deferredAudioSeek) {
574        // If there was a seek request while we were paused
575        // and we're just starting up again, honor the request now.
576        seekAudioIfNecessary_l();
577    }
578
579    if (mFlags & AT_EOS) {
580        // Legacy behaviour, if a stream finishes playing and then
581        // is started again, we play from the start...
582        seekTo_l(0);
583    }
584
585    return OK;
586}
587
588
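// Lazily (re)creates the local preview renderer from the decoder's reported
// color format and the configured output dimensions. Called once a surface
// is available and again whenever the decoder signals a format change.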
589status_t PreviewPlayer::initRenderer_l() {
590    if (mSurface != NULL || mISurface != NULL) {
591        sp<MetaData> meta = mVideoSource->getFormat();
592
593        int32_t format;
594        const char *component;
595        int32_t decodedWidth, decodedHeight;
596        CHECK(meta->findInt32(kKeyColorFormat, &format));
597        CHECK(meta->findCString(kKeyDecoderComponent, &component));
598        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
599        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
600
601        // Must ensure that mVideoRenderer's destructor is actually executed
602        // before creating a new one.
603        IPCThreadState::self()->flushCommands();
604
605        // Always use the local renderer: the decoded buffers are modified
606        // by the postprocessing module, which needs CPU access to the
607        // pixel data. Decoders are instantiated locally and as a
608        // consequence allocate their buffers in local address space.
609        if(mVideoRenderer == NULL) {
610
611            mVideoRenderer = PreviewLocalRenderer::initPreviewLocalRenderer(
612                false,  // previewOnly
613                (OMX_COLOR_FORMATTYPE)format,
614                mSurface,
615                mOutputVideoWidth, mOutputVideoHeight,
616                mOutputVideoWidth, mOutputVideoHeight);
617
618            if ( mVideoRenderer == NULL )
619            {
620                return UNKNOWN_ERROR;
621            }
622            return OK;
623        }
624    }
625    return OK;
626}
627
628
629void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
630    Mutex::Autolock autoLock(mLock);
631    mISurface = isurface;
632}
633
634
635status_t PreviewPlayer::seekTo(int64_t timeUs) {
636
637    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
638        Mutex::Autolock autoLock(mLock);
639        return seekTo_l(timeUs);
640    }
641
642    return OK;
643}
644
645
646status_t PreviewPlayer::getVideoDimensions(
647        int32_t *width, int32_t *height) const {
648    Mutex::Autolock autoLock(mLock);
649
650    if (mVideoWidth < 0 || mVideoHeight < 0) {
651        return UNKNOWN_ERROR;
652    }
653
654    *width = mVideoWidth;
655    *height = mVideoHeight;
656
657    return OK;
658}
659
660
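// Selects the audio source for the clip: raw PCM tracks are used directly,
// anything else is decoded through OMXCodec and wrapped in a VideoEditorSRC
// sample-rate converter. Also extends mDurationUs from the track metadata
// when the audio track is longer than the current duration.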
661status_t PreviewPlayer::initAudioDecoder() {
662    sp<MetaData> meta = mAudioTrack->getFormat();
663    const char *mime;
664    CHECK(meta->findCString(kKeyMIMEType, &mime));
665
666    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
667        mAudioSource = mAudioTrack;
668    } else {
669        sp<MediaSource> aRawSource;
670        aRawSource = OMXCodec::Create(
671                mClient.interface(), mAudioTrack->getFormat(),
672                false, // createEncoder
673                mAudioTrack);
674
675        if(aRawSource != NULL) {
676            LOGV("initAudioDecoder: new VideoEditorSRC");
677            mAudioSource = new VideoEditorSRC(aRawSource);
678        }
679    }
680
681    if (mAudioSource != NULL) {
682        int64_t durationUs;
683        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
684            Mutex::Autolock autoLock(mMiscStateLock);
685            if (mDurationUs < 0 || durationUs > mDurationUs) {
686                mDurationUs = durationUs;
687            }
688        }
689        status_t err = mAudioSource->start();
690
691        if (err != OK) {
692            mAudioSource.clear();
693            return err;
694        }
695    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
696        // For legacy reasons we're simply going to ignore the absence
697        // of an audio decoder for QCELP instead of aborting playback
698        // altogether.
699        return OK;
700    }
701
702    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
703}
704
705
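// Instantiates the video decoder through OMXCodec with the supplied flags
// (kHardwareCodecsOnly when called from prepare), records the clip
// dimensions and starts the source.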
706status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
707
708    mVideoSource = OMXCodec::Create(
709            mClient.interface(), mVideoTrack->getFormat(),
710            false,
711            mVideoTrack,
712            NULL, flags);
713
714    if (mVideoSource != NULL) {
715        int64_t durationUs;
716        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
717            Mutex::Autolock autoLock(mMiscStateLock);
718            if (mDurationUs < 0 || durationUs > mDurationUs) {
719                mDurationUs = durationUs;
720            }
721        }
722
723        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
724        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
725
726        mReportedWidth = mVideoWidth;
727        mReportedHeight = mVideoHeight;
728
729        status_t err = mVideoSource->start();
730
731        if (err != OK) {
732            mVideoSource.clear();
733            return err;
734        }
735    }
736
737    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
738}
739
740
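// Main video pulse. Rough flow of one iteration:
//  1. Bail out if the event was cancelled or a preview seek is pending.
//  2. Read the next decoded frame, handling seeks, format changes, EOS and
//     frames that lie before the begin cut time.
//  3. For real video (not still images), compute the frame's lateness
//     against the time source and drop it or re-post the event accordingly.
//  4. Enable/disable the effects active at this timestamp, post overlay and
//     progress notifications, run post-processing or media rendering, and
//     render the result.
//  5. Post EOS once the end cut time is reached, otherwise schedule the
//     next video event.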
741void PreviewPlayer::onVideoEvent() {
742    uint32_t i=0;
743    bool bAppliedVideoEffect = false;
744    M4OSA_ERR err1 = M4NO_ERROR;
745    int64_t imageFrameTimeUs = 0;
746
747    Mutex::Autolock autoLock(mLock);
748    if (!mVideoEventPending) {
749        // The event has been cancelled in reset_l() but had already
750        // been scheduled for execution at that time.
751        return;
752    }
753    mVideoEventPending = false;
754
755    if (mFlags & SEEK_PREVIEW) {
756        mFlags &= ~SEEK_PREVIEW;
757        return;
758    }
759
760    TimeSource *ts_st =  &mSystemTimeSource;
761    int64_t timeStartUs = ts_st->getRealTimeUs();
762
763    if (mSeeking) {
764        if (mLastVideoBuffer) {
765            mLastVideoBuffer->release();
766            mLastVideoBuffer = NULL;
767        }
768
769
770        if(mAudioSource != NULL) {
771
772            // We're going to seek the video source first, followed by
773            // the audio source.
774            // In order to avoid jumps in the DataSource offset caused by
775            // the audio codec prefetching data from the old locations
776            // while the video codec is already reading data from the new
777            // locations, we'll "pause" the audio source, causing it to
778            // stop reading input data until a subsequent seek.
779
780            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
781                mAudioPlayer->pause();
782                mFlags &= ~AUDIO_RUNNING;
783            }
784            mAudioSource->pause();
785        }
786    }
787
788    if (!mVideoBuffer) {
789        MediaSource::ReadOptions options;
790        if (mSeeking) {
791            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
792                                                      mSeekTimeUs / 1E6);
793
794            options.setSeekTo(
795                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
796        }
797        for (;;) {
798            status_t err = mVideoSource->read(&mVideoBuffer, &options);
799            options.clearSeekTo();
800
801            if (err != OK) {
802                CHECK_EQ(mVideoBuffer, NULL);
803
804                if (err == INFO_FORMAT_CHANGED) {
805                    LOGV("LV PLAYER VideoSource signalled format change");
806                    notifyVideoSize_l();
807                    sp<MetaData> meta = mVideoSource->getFormat();
808
809                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
810                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
811                    if (mVideoRenderer != NULL) {
812                        mVideoRendererIsPreview = false;
813                        err = initRenderer_l();
814                        if (err != OK) {
815                            postStreamDoneEvent_l(err);
816                        }
817
818                    }
819                    continue;
820                }
821                // So video playback is complete, but we may still have
822                // a seek request pending that needs to be applied to the audio track
823                if (mSeeking) {
824                    LOGV("video stream ended while seeking!");
825                }
826                finishSeekIfNecessary(-1);
827                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
828                mFlags |= VIDEO_AT_EOS;
829                mOverlayUpdateEventPosted = false;
830                postStreamDoneEvent_l(err);
831                return;
832            }
833
834            if (mVideoBuffer->range_length() == 0) {
835                // Some decoders, notably the PV AVC software decoder
836                // return spurious empty buffers that we just want to ignore.
837
838                mVideoBuffer->release();
839                mVideoBuffer = NULL;
840                continue;
841            }
842
843            int64_t videoTimeUs;
844            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
845
846            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
847                // Frames are before the begin cut time;
848                // do not render them
849                mVideoBuffer->release();
850                mVideoBuffer = NULL;
851                continue;
852            }
853
854            break;
855        }
856    }
857
858    mNumberDecVideoFrames++;
859
860    int64_t timeUs;
861    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
862
863    {
864        Mutex::Autolock autoLock(mMiscStateLock);
865        mVideoTimeUs = timeUs;
866    }
867
868    mDecodedVideoTs = timeUs;
869
870    if(!mStartNextPlayer) {
871        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
872        if(playbackTimeRemaining <= 1500000) {
873            //When less than 1.5 sec of playback left
874            // send notification to start next player
875
876            mStartNextPlayer = true;
877            notifyListener_l(0xAAAAAAAA);
878        }
879    }
880
881    bool wasSeeking = mSeeking;
882    finishSeekIfNecessary(timeUs);
883    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
884        status_t err = startAudioPlayer_l();
885        if (err != OK) {
886            LOGE("Starting the audio player failed w/ err %d", err);
887            return;
888        }
889    }
890
891    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
892
893    if(ts == NULL) {
894        mVideoBuffer->release();
895        mVideoBuffer = NULL;
896        return;
897    }
898
899    if(!mIsVideoSourceJpg) {
900        if (mFlags & FIRST_FRAME) {
901            mFlags &= ~FIRST_FRAME;
902
903            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
904        }
905
906        int64_t realTimeUs, mediaTimeUs;
907        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
908            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
909            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
910        }
911
912        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
913
914        int64_t latenessUs = nowUs - timeUs;
915
916        if (wasSeeking) {
917            // Let's display the first frame after seeking right away.
918            latenessUs = 0;
919        }
920        LOGV("Audio time stamp = %lld and video time stamp = %lld",
921                                            ts->getRealTimeUs(),timeUs);
922        if (latenessUs > 40000) {
923            // We're more than 40ms late.
924
925            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
926                                           latenessUs, latenessUs / 1E6);
927
928            mVideoBuffer->release();
929            mVideoBuffer = NULL;
930            postVideoEvent_l(0);
931            return;
932        }
933
934        if (latenessUs < -25000) {
935            // We're more than 25ms early.
936            LOGV("We're more than 25ms early, lateness %lld", latenessUs);
937
938            postVideoEvent_l(25000);
939            return;
940        }
941    }
942
943    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
944        mVideoRendererIsPreview = false;
945
946        status_t err = initRenderer_l();
947        if (err != OK) {
948            postStreamDoneEvent_l(err);
949        }
950    }
951
952    // If the timestamp exceeds the clip's endCutTime, do not render
953    if((timeUs/1000) > mPlayEndTimeMsec) {
954        if (mLastVideoBuffer) {
955            mLastVideoBuffer->release();
956            mLastVideoBuffer = NULL;
957        }
958        mLastVideoBuffer = mVideoBuffer;
959        mVideoBuffer = NULL;
960        mFlags |= VIDEO_AT_EOS;
961        mFlags |= AUDIO_AT_EOS;
962        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
963        mOverlayUpdateEventPosted = false;
964        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
965        return;
966    }
967
968    // Post processing to apply video effects
969    for(i=0;i<mNumberEffects;i++) {
970        // First check if effect starttime matches the clip being previewed
971        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
972        (mEffectsSettings[i].uiStartTime >=
973         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
974        {
975            // This effect doesn't belong to this clip, check next one
976            continue;
977        }
978        // Check if effect applies to this particular frame timestamp
979        if((mEffectsSettings[i].uiStartTime <=
980         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
981            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
982             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
983              && (mEffectsSettings[i].uiDuration != 0)) {
984            setVideoPostProcessingNode(
985             mEffectsSettings[i].VideoEffectType, TRUE);
986        }
987        else {
988            setVideoPostProcessingNode(
989             mEffectsSettings[i].VideoEffectType, FALSE);
990        }
991    }
992
993    //Provide the overlay Update indication when there is an overlay effect
994    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
995        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
996        if (!mOverlayUpdateEventPosted) {
997            // Find the effect in effectSettings array
998            int index;
999            for (index = 0; index < mNumberEffects; index++) {
1000                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
1001                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
1002                if(mEffectsSettings[index].VideoEffectType ==
1003                    M4xVSS_kVideoEffectType_Framing) {
1004                    if (((mEffectsSettings[index].uiStartTime + 1) <=
1005                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
1006                        ((mEffectsSettings[index].uiStartTime - 1 +
1007                        mEffectsSettings[index].uiDuration) >=
1008                        timeMs + timeOffset - mPlayBeginTimeMsec))
1009                    {
1010                        break;
1011                    }
1012                }
1013            }
1014            if (index < mNumberEffects) {
1015                mCurrFramingEffectIndex = index;
1016                mOverlayUpdateEventPosted = true;
1017                postOverlayUpdateEvent_l();
1018                LOGV("Framing index = %d", mCurrFramingEffectIndex);
1019            } else {
1020                LOGV("No framing effects found");
1021            }
1022        }
1023
1024    } else if (mOverlayUpdateEventPosted) {
1025        // Post the event when the overlay is no longer valid
1026        LOGV("Overlay is Done");
1027        mOverlayUpdateEventPosted = false;
1028        postOverlayUpdateEvent_l();
1029    }
1030
1031
1032    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
1033        err1 = doVideoPostProcessing();
1034        if(err1 != M4NO_ERROR) {
1035            LOGE("doVideoPostProcessing returned err 0x%x", (unsigned int)err1);
1036            bAppliedVideoEffect = false;
1037        }
1038        else {
1039            bAppliedVideoEffect = true;
1040        }
1041    }
1042    else {
1043        bAppliedVideoEffect = false;
1044        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
1045            // No effects to be applied, but media rendering to be done
1046            err1 = doMediaRendering();
1047            if(err1 != M4NO_ERROR) {
1048                LOGE("doMediaRendering returned err 0x%x", (unsigned int)err1);
1049                //Use original mVideoBuffer for rendering
1050                mVideoResizedOrCropped = false;
1051            }
1052        }
1053    }
1054
1055    if (mVideoRenderer != NULL) {
1056        LOGV("mVideoRenderer CALL render()");
1057        mVideoRenderer->render();
1058    }
1059
1060    if (mLastVideoBuffer) {
1061        mLastVideoBuffer->release();
1062        mLastVideoBuffer = NULL;
1063    }
1064
1065    mLastVideoBuffer = mVideoBuffer;
1066    mVideoBuffer = NULL;
1067
1068    // Post progress callback based on callback interval set
1069    if(mNumberDecVideoFrames >= mProgressCbInterval) {
1070        postProgressCallbackEvent_l();
1071        mNumberDecVideoFrames = 0;  // reset counter
1072    }
1073
1074    // if reached EndCutTime of clip, post EOS event
1075    if((timeUs/1000) >= mPlayEndTimeMsec) {
1076        LOGV("PreviewPlayer: onVideoEvent EOS.");
1077        mFlags |= VIDEO_AT_EOS;
1078        mFlags |= AUDIO_AT_EOS;
1079        mOverlayUpdateEventPosted = false;
1080        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
1081    }
1082    else {
1083        if(!mIsVideoSourceJpg) {
1084            postVideoEvent_l(0);
1085        }
1086        else {
1087            postVideoEvent_l(33000);
1088        }
1089    }
1090}
1091
1092status_t PreviewPlayer::prepare() {
1093    Mutex::Autolock autoLock(mLock);
1094    return prepare_l();
1095}
1096
1097status_t PreviewPlayer::prepare_l() {
1098    if (mFlags & PREPARED) {
1099        return OK;
1100    }
1101
1102    if (mFlags & PREPARING) {
1103        return UNKNOWN_ERROR;
1104    }
1105
1106    mIsAsyncPrepare = false;
1107    status_t err = prepareAsync_l();
1108
1109    if (err != OK) {
1110        return err;
1111    }
1112
1113    while (mFlags & PREPARING) {
1114        mPreparedCondition.wait(mLock);
1115    }
1116
1117    return mPrepareResult;
1118}
1119
1120status_t PreviewPlayer::prepareAsync_l() {
1121    if (mFlags & PREPARING) {
1122        return UNKNOWN_ERROR;  // async prepare already pending
1123    }
1124
1125    if (!mQueueStarted) {
1126        mQueue.start();
1127        mQueueStarted = true;
1128    }
1129
1130    mFlags |= PREPARING;
1131    mAsyncPrepareEvent = new PreviewPlayerEvent(
1132            this, &PreviewPlayer::onPrepareAsyncEvent);
1133
1134    mQueue.postEvent(mAsyncPrepareEvent);
1135
1136    return OK;
1137}
1138
1139status_t PreviewPlayer::finishSetDataSource_l() {
1140    sp<DataSource> dataSource;
1141    sp<MediaExtractor> extractor;
1142
1143    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
1144
1145    if (dataSource == NULL) {
1146        return UNKNOWN_ERROR;
1147    }
1148
1149    //If file type is .rgb, then no need to check for Extractor
1150    int uriLen = strlen(mUri.string());
1151    int startOffset = uriLen - 4;
1152    if ((uriLen >= 4) && !strncasecmp(mUri.string()+startOffset, ".rgb", 4)) {
1153        extractor = NULL;
1154    }
1155    else {
1156        extractor = MediaExtractor::Create(dataSource,
1157                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
1158    }
1159
1160    if (extractor == NULL) {
1161        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
1162        return setDataSource_l_jpg();
1163    }
1164
1165    return setDataSource_l(extractor);
1166}
1167
1168
1169// static
1170bool PreviewPlayer::ContinuePreparation(void *cookie) {
1171    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
1172
1173    return (me->mFlags & PREPARE_CANCELLED) == 0;
1174}
1175
1176void PreviewPlayer::onPrepareAsyncEvent() {
1177    Mutex::Autolock autoLock(mLock);
1178    LOGV("onPrepareAsyncEvent");
1179
1180    if (mFlags & PREPARE_CANCELLED) {
1181        LOGV("LV PLAYER prepare was cancelled before doing anything");
1182        abortPrepare(UNKNOWN_ERROR);
1183        return;
1184    }
1185
1186    if (mUri.size() > 0) {
1187        status_t err = finishSetDataSource_l();
1188
1189        if (err != OK) {
1190            abortPrepare(err);
1191            return;
1192        }
1193    }
1194
1195    if (mVideoTrack != NULL && mVideoSource == NULL) {
1196        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
1197
1198        if (err != OK) {
1199            abortPrepare(err);
1200            return;
1201        }
1202    }
1203
1204    if (mAudioTrack != NULL && mAudioSource == NULL) {
1205        status_t err = initAudioDecoder();
1206
1207        if (err != OK) {
1208            abortPrepare(err);
1209            return;
1210        }
1211    }
1212    finishAsyncPrepare_l();
1213
1214}
1215
1216void PreviewPlayer::finishAsyncPrepare_l() {
1217    if (mIsAsyncPrepare) {
1218        if (mVideoSource == NULL) {
1219            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
1220            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
1221        } else {
1222            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
1223            notifyVideoSize_l();
1224        }
1225        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
1226        notifyListener_l(MEDIA_PREPARED);
1227    }
1228
1229    mPrepareResult = OK;
1230    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
1231    mFlags |= PREPARED;
1232    mAsyncPrepareEvent = NULL;
1233    mPreparedCondition.broadcast();
1234}
1235
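// Saves the playback state (URI, headers, flags, position and, when it is
// readable, a copy of the last decoded frame) into mSuspensionState, then
// resets the player. resume() repaints the surface from the saved frame.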
1236status_t PreviewPlayer::suspend() {
1237    LOGV("suspend");
1238    Mutex::Autolock autoLock(mLock);
1239
1240    if (mSuspensionState != NULL) {
1241        if (mLastVideoBuffer == NULL) {
1242            // We get here if the player is suspended again after a
1243            // resume without anything having been played in between;
1244            // in that case keep the previously saved state.
1245            SuspensionState *state = mSuspensionState;
1246            mSuspensionState = NULL;
1247            reset_l();
1248            mSuspensionState = state;
1249            return OK;
1250        }
1251
1252        delete mSuspensionState;
1253        mSuspensionState = NULL;
1254    }
1255
1256    if (mFlags & PREPARING) {
1257        mFlags |= PREPARE_CANCELLED;
1258    }
1259
1260    while (mFlags & PREPARING) {
1261        mPreparedCondition.wait(mLock);
1262    }
1263
1264    SuspensionState *state = new SuspensionState;
1265    state->mUri = mUri;
1266    state->mUriHeaders = mUriHeaders;
1267    state->mFileSource = mFileSource;
1268
1269    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
1270    getPosition(&state->mPositionUs);
1271
1272    if (mLastVideoBuffer) {
1273        size_t size = mLastVideoBuffer->range_length();
1274        if (size) {
1275            int32_t unreadable;
1276            if (!mLastVideoBuffer->meta_data()->findInt32(
1277                        kKeyIsUnreadable, &unreadable)
1278                    || unreadable == 0) {
1279                state->mLastVideoFrameSize = size;
1280                state->mLastVideoFrame = malloc(size);
1281                memcpy(state->mLastVideoFrame,
1282                   (const uint8_t *)mLastVideoBuffer->data()
1283                        + mLastVideoBuffer->range_offset(),
1284                   size);
1285
1286                state->mVideoWidth = mVideoWidth;
1287                state->mVideoHeight = mVideoHeight;
1288
1289                sp<MetaData> meta = mVideoSource->getFormat();
1290                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
1291                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
1292                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
1293            } else {
1294                LOGV("Unable to save last video frame, we have no access to "
1295                     "the decoded video data.");
1296            }
1297        }
1298    }
1299
1300    reset_l();
1301
1302    mSuspensionState = state;
1303
1304    return OK;
1305}
1306
1307status_t PreviewPlayer::resume() {
1308    LOGV("resume");
1309    Mutex::Autolock autoLock(mLock);
1310
1311    if (mSuspensionState == NULL) {
1312        return INVALID_OPERATION;
1313    }
1314
1315    SuspensionState *state = mSuspensionState;
1316    mSuspensionState = NULL;
1317
1318    status_t err;
1319    if (state->mFileSource != NULL) {
1320        err = AwesomePlayer::setDataSource_l(state->mFileSource);
1321
1322        if (err == OK) {
1323            mFileSource = state->mFileSource;
1324        }
1325    } else {
1326        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
1327    }
1328
1329    if (err != OK) {
1330        delete state;
1331        state = NULL;
1332
1333        return err;
1334    }
1335
1336    seekTo_l(state->mPositionUs);
1337
1338    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
1339
1340    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
1341        mVideoRenderer =
1342            PreviewLocalRenderer::initPreviewLocalRenderer(
1343                    true,  // previewOnly
1344                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
1345                    mSurface,
1346                    state->mVideoWidth,
1347                    state->mVideoHeight,
1348                    state->mDecodedWidth,
1349                    state->mDecodedHeight);
1350
1351        mVideoRendererIsPreview = true;
1352
1353        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
1354                state->mLastVideoFrame, state->mLastVideoFrameSize);
1355    }
1356
1357    if (state->mFlags & PLAYING) {
1358        play_l();
1359    }
1360
1361    mSuspensionState = state;
1362    state = NULL;
1363
1364    return OK;
1365}
1366
1367
1368status_t PreviewPlayer::loadEffectsSettings(
1369                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
1370    M4OSA_UInt32 i = 0, rgbSize = 0;
1371    M4VIFI_UInt8 *tmp = M4OSA_NULL;
1372
1373    mNumberEffects = nEffects;
1374    mEffectsSettings = pEffectSettings;
1375    return OK;
1376}
1377
1378status_t PreviewPlayer::loadAudioMixSettings(
1379                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
1380
1381    LOGV("PreviewPlayer: loadAudioMixSettings: ");
1382    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
1383    return OK;
1384}
1385
1386status_t PreviewPlayer::setAudioMixPCMFileHandle(
1387                    M4OSA_Context pAudioMixPCMFileHandle) {
1388
1389    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
1390    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
1391    return OK;
1392}
1393
1394status_t PreviewPlayer::setAudioMixStoryBoardParam(
1395                    M4OSA_UInt32 audioMixStoryBoardTS,
1396                    M4OSA_UInt32 currentMediaBeginCutTime,
1397                    M4OSA_UInt32 primaryTrackVolValue ) {
1398
1399    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
1400    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
1401    mCurrentMediaVolumeValue = primaryTrackVolValue;
1402    return OK;
1403}
1404
1405status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
1406
1407    mPlayBeginTimeMsec = msec;
1408    return OK;
1409}
1410
1411status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
1412
1413    mPlayEndTimeMsec = msec;
1414    return OK;
1415}
1416
1417status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
1418
1419    mStoryboardStartTimeMsec = msec;
1420    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1421    return OK;
1422}
1423
1424status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
1425
1426    mProgressCbInterval = cbInterval;
1427    return OK;
1428}
1429
1430
1431status_t PreviewPlayer::setMediaRenderingMode(
1432        M4xVSS_MediaRendering mode,
1433        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
1434
1435    mRenderingMode = mode;
1436
1437    /* reset boolean for each clip*/
1438    mVideoResizedOrCropped = false;
1439
1440    switch(outputVideoSize) {
1441        case M4VIDEOEDITING_kSQCIF:
1442            mOutputVideoWidth = 128;
1443            mOutputVideoHeight = 96;
1444            break;
1445
1446        case M4VIDEOEDITING_kQQVGA:
1447            mOutputVideoWidth = 160;
1448            mOutputVideoHeight = 120;
1449            break;
1450
1451        case M4VIDEOEDITING_kQCIF:
1452            mOutputVideoWidth = 176;
1453            mOutputVideoHeight = 144;
1454            break;
1455
1456        case M4VIDEOEDITING_kQVGA:
1457            mOutputVideoWidth = 320;
1458            mOutputVideoHeight = 240;
1459            break;
1460
1461        case M4VIDEOEDITING_kCIF:
1462            mOutputVideoWidth = 352;
1463            mOutputVideoHeight = 288;
1464            break;
1465
1466        case M4VIDEOEDITING_kVGA:
1467            mOutputVideoWidth = 640;
1468            mOutputVideoHeight = 480;
1469            break;
1470
1471        case M4VIDEOEDITING_kWVGA:
1472            mOutputVideoWidth = 800;
1473            mOutputVideoHeight = 480;
1474            break;
1475
1476        case M4VIDEOEDITING_kNTSC:
1477            mOutputVideoWidth = 720;
1478            mOutputVideoHeight = 480;
1479            break;
1480
1481        case M4VIDEOEDITING_k640_360:
1482            mOutputVideoWidth = 640;
1483            mOutputVideoHeight = 360;
1484            break;
1485
1486        case M4VIDEOEDITING_k854_480:
1487            mOutputVideoWidth = 854;
1488            mOutputVideoHeight = 480;
1489            break;
1490
1491        case M4VIDEOEDITING_kHD1280:
1492            mOutputVideoWidth = 1280;
1493            mOutputVideoHeight = 720;
1494            break;
1495
1496        case M4VIDEOEDITING_kHD1080:
1497            mOutputVideoWidth = 1080;
1498            mOutputVideoHeight = 720;
1499            break;
1500
1501        case M4VIDEOEDITING_kHD960:
1502            mOutputVideoWidth = 960;
1503            mOutputVideoHeight = 720;
1504            break;
1505
1506        default:
1507            LOGE("unsupported output video size set");
1508            return BAD_VALUE;
1509    }
1510
1511    return OK;
1512}
1513
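// Copies the decoded YUV420 planar frame into the renderer's YV12 output
// buffer, applying the configured rendering mode (resizing, cropping or
// black borders) via applyRenderingMode(). Used when no video effect is
// active but a media rendering mode is set.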
1514M4OSA_ERR PreviewPlayer::doMediaRendering() {
1515    M4OSA_ERR err = M4NO_ERROR;
1516    M4VIFI_ImagePlane planeIn[3], planeOut[3];
1517    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
1518    M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
1519    size_t videoBufferSize = 0;
1520    M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0;
1521    int32_t colorFormat = 0;
1522
1523    if(!mIsVideoSourceJpg) {
1524        sp<MetaData> meta = mVideoSource->getFormat();
1525        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1526    }
1527    else {
1528        colorFormat = OMX_COLOR_FormatYUV420Planar;
1529    }
1530
1531    videoBufferSize = mVideoBuffer->size();
1532    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
1533
1534    uint8_t* outBuffer;
1535    size_t outBufferStride = 0;
1536
1537    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
1538
1539    bufferOffset = index*frameSize;
1540    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
1541                mVideoBuffer->range_offset()+bufferOffset;
1542
1543
1544    /* In plane*/
1545    prepareYUV420ImagePlane(planeIn, mVideoWidth,
1546      mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
1547
1548    // Set the output YUV420 plane to be compatible with YV12 format
1549    // W & H even
1550    // YVU instead of YUV
1551    // align buffers on 32 bits
1552
1553    //In YV12 format, sizes must be even
1554    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
1555    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
1556
1557    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
1558     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
1559
1560
1561    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
1562
1563    if(err != M4NO_ERROR)
1564    {
1565        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
1566        return err;
1567    }
1568    mVideoResizedOrCropped = true;
1569
1570    return err;
1571}
1572
1573status_t PreviewPlayer::resetJniCallbackTimeStamp() {
1574
1575    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1576    return OK;
1577}
1578
1579void PreviewPlayer::postProgressCallbackEvent_l() {
1580    if (mProgressCbEventPending) {
1581        return;
1582    }
1583    mProgressCbEventPending = true;
1584
1585    mQueue.postEvent(mProgressCbEvent);
1586}
1587
1588
1589void PreviewPlayer::onProgressCbEvent() {
1590    Mutex::Autolock autoLock(mLock);
1591    if (!mProgressCbEventPending) {
1592        return;
1593    }
1594    mProgressCbEventPending = false;
1595    // If decoding started from an I-frame before the begin cut time,
1596    // report only the storyboard start time for now
1597    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
1598        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
1599    }
1600    else {
1601        notifyListener_l(MEDIA_INFO, 0,
1602        (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
1603    }
1604}
1605
1606void PreviewPlayer::postOverlayUpdateEvent_l() {
1607    if (mOverlayUpdateEventPending) {
1608        return;
1609    }
1610    mOverlayUpdateEventPending = true;
1611    mQueue.postEvent(mOverlayUpdateEvent);
1612}
1613
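// Notifies the listener (message 0xBBBBBBBB) whether the framing overlay at
// mCurrFramingEffectIndex should currently be drawn (1) or cleared (0).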
1614void PreviewPlayer::onUpdateOverlayEvent() {
1615    Mutex::Autolock autoLock(mLock);
1616
1617    if (!mOverlayUpdateEventPending) {
1618        return;
1619    }
1620    mOverlayUpdateEventPending = false;
1621
1622    int updateState;
1623    if (mOverlayUpdateEventPosted) {
1624        updateState = 1;
1625    } else {
1626        updateState = 0;
1627    }
1628    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
1629}
1630
1631
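// Maps an M4VSS3GPP/M4xVSS effect type onto the player's internal
// VIDEO_EFFECT_* bitmask and sets or clears it in mCurrentVideoEffect.
// Enabling the "fifties" effect also latches mIsFiftiesEffectStarted, which
// is consumed by the next doVideoPostProcessing() call.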
1632void PreviewPlayer::setVideoPostProcessingNode(
1633                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
1634
1635    uint32_t effect = VIDEO_EFFECT_NONE;
1636
1637    //Map M4VSS3GPP_VideoEffectType to local enum
1638    switch(type) {
1639        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1640            effect = VIDEO_EFFECT_FADEFROMBLACK;
1641            break;
1642
1643        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1644            effect = VIDEO_EFFECT_FADETOBLACK;
1645            break;
1646
1647        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1648            effect = VIDEO_EFFECT_CURTAINOPEN;
1649            break;
1650
1651        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1652            effect = VIDEO_EFFECT_CURTAINCLOSE;
1653            break;
1654
1655        case M4xVSS_kVideoEffectType_BlackAndWhite:
1656            effect = VIDEO_EFFECT_BLACKANDWHITE;
1657            break;
1658
1659        case M4xVSS_kVideoEffectType_Pink:
1660            effect = VIDEO_EFFECT_PINK;
1661            break;
1662
1663        case M4xVSS_kVideoEffectType_Green:
1664            effect = VIDEO_EFFECT_GREEN;
1665            break;
1666
1667        case M4xVSS_kVideoEffectType_Sepia:
1668            effect = VIDEO_EFFECT_SEPIA;
1669            break;
1670
1671        case M4xVSS_kVideoEffectType_Negative:
1672            effect = VIDEO_EFFECT_NEGATIVE;
1673            break;
1674
1675        case M4xVSS_kVideoEffectType_Framing:
1676            effect = VIDEO_EFFECT_FRAMING;
1677            break;
1678
1679        case M4xVSS_kVideoEffectType_Fifties:
1680            effect = VIDEO_EFFECT_FIFTIES;
1681            break;
1682
1683        case M4xVSS_kVideoEffectType_ColorRGB16:
1684            effect = VIDEO_EFFECT_COLOR_RGB16;
1685            break;
1686
1687        case M4xVSS_kVideoEffectType_Gradient:
1688            effect = VIDEO_EFFECT_GRADIENT;
1689            break;
1690
1691        default:
1692            effect = VIDEO_EFFECT_NONE;
1693            break;
1694    }
1695
1696    if(enable == M4OSA_TRUE) {
1697        //If already set, then no need to set again
1698        if(!(mCurrentVideoEffect & effect)) {
1699            mCurrentVideoEffect |= effect;
1700            if(effect == VIDEO_EFFECT_FIFTIES) {
1701                mIsFiftiesEffectStarted = true;
1702            }
1703        }
1704    }
1705    else  {
1706        //Reset only if already set
1707        if(mCurrentVideoEffect & effect) {
1708            mCurrentVideoEffect &= ~effect;
1709        }
1710    }
1711}
1712
1713status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
1714    mVideoWidth = width;
1715    mVideoHeight = height;
1716    return OK;
1717}
1718
1719
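// Applies the currently enabled video effects and the rendering mode to the
// decoded frame: fills a vePostProcessParams descriptor with the input
// frame, the effect settings, the overlay buffers and the renderer's output
// buffer, then delegates to applyEffectsAndRenderingMode(). Semi-planar
// YUV420 input is rejected.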
1720M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
1721    M4OSA_ERR err = M4NO_ERROR;
1722    vePostProcessParams postProcessParams;
1723    int32_t colorFormat = 0;
1724
1725
1726    if(!mIsVideoSourceJpg) {
1727        sp<MetaData> meta = mVideoSource->getFormat();
1728        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1729    }
1730    else {
1731        colorFormat = OMX_COLOR_FormatYUV420Planar;
1732    }
1733
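    // Semi-planar YUV420 input (including 0x7FA30C00, which appears to be a
    // vendor-specific semi-planar variant) is not supported by the effect
    // post-processing code.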
1734    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
1735       (colorFormat == 0x7FA30C00)) {
1736          LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
1737          return M4ERR_UNSUPPORTED_MEDIA_TYPE;
1738    }
1739
1740    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
1741        + mVideoBuffer->range_offset();
1742
1743    postProcessParams.videoWidth = mVideoWidth;
1744    postProcessParams.videoHeight = mVideoHeight;
1745    postProcessParams.timeMs = mDecodedVideoTs/1000;
1746    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
1747    postProcessParams.effectsSettings = mEffectsSettings;
1748    postProcessParams.numberEffects = mNumberEffects;
1749    postProcessParams.outVideoWidth = mOutputVideoWidth;
1750    postProcessParams.outVideoHeight = mOutputVideoHeight;
1751    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
1752    postProcessParams.renderingMode = mRenderingMode;
1753    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
1754        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
1755        mIsFiftiesEffectStarted = M4OSA_FALSE;
1756    }
1757    else {
1758       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
1759    }
1760
1761    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
1762    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
1763    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
1764    err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
1765
1766    return err;
1767}
1768
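// Decodes frames until the first one at or after the clip's begin cut time
// and keeps it in mVideoBuffer for the first onVideoEvent() to consume;
// its timestamp is recorded in mVideoTimeUs / mDecodedVideoTs.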
1769status_t PreviewPlayer::readFirstVideoFrame() {
1770    LOGV("PreviewPlayer::readFirstVideoFrame");
1771
1772    if (mFlags & SEEK_PREVIEW) {
1773        mFlags &= ~SEEK_PREVIEW;
1774        return OK;
1775    }
1776
1777    if (!mVideoBuffer) {
1778        MediaSource::ReadOptions options;
1779        if (mSeeking) {
1780            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
1781                    mSeekTimeUs / 1E6);
1782
1783            options.setSeekTo(
1784                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
1785        }
1786        for (;;) {
1787            status_t err = mVideoSource->read(&mVideoBuffer, &options);
1788            options.clearSeekTo();
1789
1790            if (err != OK) {
1791                CHECK_EQ(mVideoBuffer, NULL);
1792
1793                if (err == INFO_FORMAT_CHANGED) {
1794                    LOGV("LV PLAYER VideoSource signalled format change");
1795                    notifyVideoSize_l();
1796                    sp<MetaData> meta = mVideoSource->getFormat();
1797
1798                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
1799                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
1800
1801                    if (mVideoRenderer != NULL) {
1802                        mVideoRendererIsPreview = false;
1803                        err = initRenderer_l();
1804                        if (err != OK) {
1805                            postStreamDoneEvent_l(err);
1806                        }
1807                    }
1808                    continue;
1809                }
1810                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
1811                mFlags |= VIDEO_AT_EOS;
1812                postStreamDoneEvent_l(err);
1813                return OK;
1814            }
1815
1816            if (mVideoBuffer->range_length() == 0) {
1817                // Some decoders, notably the PV AVC software decoder
1818                // return spurious empty buffers that we just want to ignore.
1819
1820                mVideoBuffer->release();
1821                mVideoBuffer = NULL;
1822                continue;
1823            }
1824
1825            int64_t videoTimeUs;
1826            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
1827
1828            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
1829                // buffers are before begin cut time
1830                // ignore them
1831                mVideoBuffer->release();
1832                mVideoBuffer = NULL;
1833                continue;
1834            }
1835
1836            break;
1837        }
1838    }
1839
1840    int64_t timeUs;
1841    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
1842
1843    {
1844        Mutex::Autolock autoLock(mMiscStateLock);
1845        mVideoTimeUs = timeUs;
1846    }
1847
1848    mDecodedVideoTs = timeUs;
1849
1850    return OK;
1851
1852}
1853
1854}  // namespace android
1855