PreviewPlayer.cpp revision 254c8dfe514fd45eef251763639f63615379570f
1/*
2 * Copyright (C) 2011 NXP Software
3 * Copyright (C) 2011 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18#define LOG_NDEBUG 1
19#define LOG_TAG "PreviewPlayer"
20#include <utils/Log.h>
21
22#include <dlfcn.h>
23
24#include "include/ARTSPController.h"
25#include "PreviewPlayer.h"
26#include "DummyAudioSource.h"
27#include "DummyVideoSource.h"
28#include "VideoEditorSRC.h"
29#include "include/LiveSession.h"
30#include "include/NuCachedSource2.h"
31#include "include/ThrottledSource.h"
32
33
34#include "PreviewRenderer.h"
35
36#include <binder/IPCThreadState.h>
37#include <media/stagefright/DataSource.h>
38#include <media/stagefright/FileSource.h>
39#include <media/stagefright/MediaBuffer.h>
40#include <media/stagefright/MediaDefs.h>
41#include <media/stagefright/MediaExtractor.h>
42#include <media/stagefright/MediaDebug.h>
43#include <media/stagefright/MediaSource.h>
44#include <media/stagefright/MetaData.h>
45#include <media/stagefright/OMXCodec.h>
46
47#include <surfaceflinger/Surface.h>
48#include <media/stagefright/foundation/ALooper.h>
49
50namespace android {
51
52
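// Event adapter: lets the TimedEventQueue invoke an arbitrary
// PreviewPlayer member function when the event fires.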
53struct PreviewPlayerEvent : public TimedEventQueue::Event {
54    PreviewPlayerEvent(
55            PreviewPlayer *player,
56            void (PreviewPlayer::*method)())
57        : mPlayer(player),
58          mMethod(method) {
59    }
60
61protected:
62    virtual ~PreviewPlayerEvent() {}
63
64    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
65        (mPlayer->*mMethod)();
66    }
67
68private:
69    PreviewPlayer *mPlayer;
70    void (PreviewPlayer::*mMethod)();
71
72    PreviewPlayerEvent(const PreviewPlayerEvent &);
73    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
74};
75
76
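// Local software renderer: wraps a PreviewRenderer and draws decoded
// (and post-processed) YV12 frames onto the preview Surface.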
77struct PreviewLocalRenderer : public PreviewPlayerRenderer {
78    PreviewLocalRenderer(
79            bool previewOnly,
80            OMX_COLOR_FORMATTYPE colorFormat,
81            const sp<Surface> &surface,
82            size_t displayWidth, size_t displayHeight,
83            size_t decodedWidth, size_t decodedHeight,
84            int32_t rotationDegrees = 0)
85        : mTarget(NULL) {
86            init(previewOnly,
87                 colorFormat, surface,
88                 displayWidth, displayHeight,
89                 decodedWidth, decodedHeight,
90                 rotationDegrees);
91    }
92
93    virtual void render(MediaBuffer *buffer) {
94        render((const uint8_t *)buffer->data() + buffer->range_offset(),
95               buffer->range_length());
96    }
97
98    void render(const void *data, size_t size) {
99        mTarget->render(data, size, NULL);
100    }
101    void render() {
102        mTarget->renderYV12();
103    }
104    void getBuffer(uint8_t **data, size_t *stride) {
105        mTarget->getBufferYV12(data, stride);
106    }
107
108protected:
109    virtual ~PreviewLocalRenderer() {
110        delete mTarget;
111        mTarget = NULL;
112    }
113
114private:
115    PreviewRenderer *mTarget;
116
117    void init(
118            bool previewOnly,
119            OMX_COLOR_FORMATTYPE colorFormat,
120            const sp<Surface> &surface,
121            size_t displayWidth, size_t displayHeight,
122            size_t decodedWidth, size_t decodedHeight,
123            int32_t rotationDegrees = 0);
124
125    PreviewLocalRenderer(const PreviewLocalRenderer &);
126    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
127};
128
129void PreviewLocalRenderer::init(
130        bool previewOnly,
131        OMX_COLOR_FORMATTYPE colorFormat,
132        const sp<Surface> &surface,
133        size_t displayWidth, size_t displayHeight,
134        size_t decodedWidth, size_t decodedHeight,
135        int32_t rotationDegrees) {
136    mTarget = new PreviewRenderer(
137            colorFormat, surface, displayWidth, displayHeight,
138            decodedWidth, decodedHeight, rotationDegrees);
139}
140
141PreviewPlayer::PreviewPlayer()
142    : AwesomePlayer(),
143      mFrameRGBBuffer(NULL),
144      mFrameYUVBuffer(NULL),
145      mReportedWidth(0),
146      mReportedHeight(0),
147      mCurrFramingEffectIndex(0) {
148
149    mVideoRenderer = NULL;
150    mLastVideoBuffer = NULL;
151    mSuspensionState = NULL;
152    mEffectsSettings = NULL;
153    mAudioMixStoryBoardTS = 0;
154    mCurrentMediaBeginCutTime = 0;
155    mCurrentMediaVolumeValue = 0;
156    mNumberEffects = 0;
157    mDecodedVideoTs = 0;
158    mDecVideoTsStoryBoard = 0;
159    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
160    mProgressCbInterval = 0;
161    mNumberDecVideoFrames = 0;
162    mOverlayUpdateEventPosted = false;
163
164    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
165    mVideoEventPending = false;
166    mStreamDoneEvent = new PreviewPlayerEvent(this,
167         &AwesomePlayer::onStreamDone);
168
169    mStreamDoneEventPending = false;
170
171    mCheckAudioStatusEvent = new PreviewPlayerEvent(
172        this, &AwesomePlayer::onCheckAudioStatus);
173
174    mAudioStatusEventPending = false;
175
176    mProgressCbEvent = new PreviewPlayerEvent(this,
177         &PreviewPlayer::onProgressCbEvent);
178
179    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
180        &PreviewPlayer::onUpdateOverlayEvent);
181    mProgressCbEventPending = false;
182
183    mOverlayUpdateEventPending = false;
184    mResizedVideoBuffer = NULL;
185    mVideoResizedOrCropped = false;
186    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
187    mIsFiftiesEffectStarted = false;
188    reset();
189}
190
191PreviewPlayer::~PreviewPlayer() {
192
193    if (mQueueStarted) {
194        mQueue.stop();
195    }
196
197    reset();
198
199    if(mResizedVideoBuffer != NULL) {
200        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
201        mResizedVideoBuffer = NULL;
202    }
203
204    mVideoRenderer.clear();
205    mVideoRenderer = NULL;
206}
207
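// Cancel all pending player events (video, stream-done, audio-status and
// progress callback). The keepBufferingGoing argument is accepted for
// signature compatibility but is not used in this implementation.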
208void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
209    mQueue.cancelEvent(mVideoEvent->eventID());
210    mVideoEventPending = false;
211    mQueue.cancelEvent(mStreamDoneEvent->eventID());
212    mStreamDoneEventPending = false;
213    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
214    mAudioStatusEventPending = false;
215
216    mQueue.cancelEvent(mProgressCbEvent->eventID());
217    mProgressCbEventPending = false;
218}
219
220status_t PreviewPlayer::setDataSource(
221        const char *uri, const KeyedVector<String8, String8> *headers) {
222    Mutex::Autolock autoLock(mLock);
223    return setDataSource_l(uri, headers);
224}
225
226status_t PreviewPlayer::setDataSource_l(
227        const char *uri, const KeyedVector<String8, String8> *headers) {
228    reset_l();
229
230    mUri = uri;
231
232    if (headers) {
233        mUriHeaders = *headers;
234    }
235
236    // The actual work will be done during preparation in the call to
237    // ::finishSetDataSource_l to avoid blocking the calling thread in
238    // setDataSource for any significant time.
239    return OK;
240}
241
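// Select the first video and first audio track exposed by the extractor.
// If the clip carries no audio track, a DummyAudioSource covering the play
// duration is created instead, so that playback still has an audio track.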
242status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
243    bool haveAudio = false;
244    bool haveVideo = false;
245    for (size_t i = 0; i < extractor->countTracks(); ++i) {
246        sp<MetaData> meta = extractor->getTrackMetaData(i);
247
248        const char *mime;
249        CHECK(meta->findCString(kKeyMIMEType, &mime));
250
251        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
252            setVideoSource(extractor->getTrack(i));
253            haveVideo = true;
254        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
255            setAudioSource(extractor->getTrack(i));
256            haveAudio = true;
257
258            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
259                // Only do this for vorbis audio, none of the other audio
260                // formats even support this ringtone specific hack and
261                // retrieving the metadata on some extractors may turn out
262                // to be very expensive.
263                sp<MetaData> fileMeta = extractor->getMetaData();
264                int32_t loop;
265                if (fileMeta != NULL
266                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
267                         && loop != 0) {
268                    mFlags |= AUTO_LOOPING;
269                }
270            }
271        }
272
273        if (haveAudio && haveVideo) {
274            break;
275        }
276    }
277
278    /* Add support for a dummy audio track */
279    if( !haveAudio ){
280        LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");
281
282        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
283                                              ((mPlayEndTimeMsec)*1000));
284        LOGV("PreviewPlayer: setDataSource_l Dummyauiosource created");
285        if(mAudioTrack != NULL) {
286            haveAudio = true;
287        }
288    }
289
290    if (!haveAudio && !haveVideo) {
291        return UNKNOWN_ERROR;
292    }
293
294    mExtractorFlags = extractor->flags();
295    return OK;
296}
297
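// Data-source setup for still-image (JPEG) clips: a DummyAudioSource
// provides the audio track and a DummyVideoSource supplies video frames
// generated from the image for the clip duration.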
298status_t PreviewPlayer::setDataSource_l_jpg() {
299    M4OSA_ERR err = M4NO_ERROR;
300    LOGV("PreviewPlayer: setDataSource_l_jpg started");
301
302    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
303                                          ((mPlayEndTimeMsec)*1000));
304    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
305    if(mAudioSource != NULL) {
306        setAudioSource(mAudioSource);
307    }
308    status_t error = mAudioSource->start();
309    if (error != OK) {
310        LOGV("Error starting dummy audio source");
311        mAudioSource.clear();
312        return error;
313    }
314
315    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;
316
317    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
318                                            mDurationUs, mUri);
319    mReportedWidth = mVideoWidth;
320    mReportedHeight = mVideoHeight;
321
322    setVideoSource(mVideoSource);
323    status_t err1 = mVideoSource->start();
324    if (err1 != OK) {
325        mVideoSource.clear();
326        return err1;
327    }
328
329    mIsVideoSourceJpg = true;
330    return OK;
331}
332
333void PreviewPlayer::reset() {
334    Mutex::Autolock autoLock(mLock);
335    reset_l();
336}
337
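// Internal reset: cancels pending events, stops and releases the audio and
// video sources (waiting for the OMX video component to be fully released),
// frees cached buffers and returns all playback state to its defaults.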
338void PreviewPlayer::reset_l() {
339
340    if (mFlags & PREPARING) {
341        mFlags |= PREPARE_CANCELLED;
342    }
343
344    while (mFlags & PREPARING) {
345        mPreparedCondition.wait(mLock);
346    }
347
348    cancelPlayerEvents();
349    mAudioTrack.clear();
350    mVideoTrack.clear();
351
352    // Shut down audio first, so that the response to the reset request
353    // appears to happen instantaneously as far as the user is concerned.
354    // If we did this later, audio would continue playing while we
355    // shut down the video-related resources and the player would appear
356    // less responsive to the reset request.
357    if (mAudioPlayer == NULL && mAudioSource != NULL) {
358        // If we had an audio player, it would have effectively
359        // taken possession of the audio source and stopped it when
360        // _it_ is stopped. Otherwise this is still our responsibility.
361        mAudioSource->stop();
362    }
363    mAudioSource.clear();
364
365    mTimeSource = NULL;
366
367    delete mAudioPlayer;
368    mAudioPlayer = NULL;
369
370    if (mLastVideoBuffer) {
371        mLastVideoBuffer->release();
372        mLastVideoBuffer = NULL;
373    }
374
375    if (mVideoBuffer) {
376        mVideoBuffer->release();
377        mVideoBuffer = NULL;
378    }
379
380    if (mVideoSource != NULL) {
381        mVideoSource->stop();
382
383        // The following hack is necessary to ensure that the OMX
384        // component is completely released by the time we may try
385        // to instantiate it again.
386        wp<MediaSource> tmp = mVideoSource;
387        mVideoSource.clear();
388        while (tmp.promote() != NULL) {
389            usleep(1000);
390        }
391        IPCThreadState::self()->flushCommands();
392    }
393
394    mDurationUs = -1;
395    mFlags = 0;
396    mExtractorFlags = 0;
397    mVideoWidth = mVideoHeight = -1;
398    mTimeSourceDeltaUs = 0;
399    mVideoTimeUs = 0;
400
401    mSeeking = false;
402    mSeekNotificationSent = false;
403    mSeekTimeUs = 0;
404
405    mUri.setTo("");
406    mUriHeaders.clear();
407
408    mFileSource.clear();
409
410    delete mSuspensionState;
411    mSuspensionState = NULL;
412
413    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
414    mIsVideoSourceJpg = false;
415    mFrameRGBBuffer = NULL;
416    if(mFrameYUVBuffer != NULL) {
417        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
418        mFrameYUVBuffer = NULL;
419    }
420}
421
422void PreviewPlayer::partial_reset_l() {
423
424    if (mLastVideoBuffer) {
425        mLastVideoBuffer->release();
426        mLastVideoBuffer = NULL;
427    }
428
429    /* Call the base class (AwesomePlayer) implementation */
430    AwesomePlayer::partial_reset_l();
431
432}
433
434status_t PreviewPlayer::play() {
435    Mutex::Autolock autoLock(mLock);
436
437    mFlags &= ~CACHE_UNDERRUN;
438
439    return play_l();
440}
441
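// Starts (or resumes) playback. On first start this creates a
// VideoEditorAudioPlayer bound to the audio sink, hands it the audio mix
// settings, PCM file handle and storyboard skim parameters, and then kicks
// off the video event loop.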
442status_t PreviewPlayer::play_l() {
443    VideoEditorAudioPlayer *mVePlayer = NULL;
444    if (mFlags & PLAYING) {
445        return OK;
446    }
447    mStartNextPlayer = false;
448
449    if (!(mFlags & PREPARED)) {
450        status_t err = prepare_l();
451
452        if (err != OK) {
453            return err;
454        }
455    }
456
457    mFlags |= PLAYING;
458    mFlags |= FIRST_FRAME;
459
460    bool deferredAudioSeek = false;
461
462    if (mAudioSource != NULL) {
463        if (mAudioPlayer == NULL) {
464            if (mAudioSink != NULL) {
465
466                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
467                mVePlayer =
468                          (VideoEditorAudioPlayer*)mAudioPlayer;
469
470                mAudioPlayer->setSource(mAudioSource);
471
472                mVePlayer->setAudioMixSettings(
473                 mPreviewPlayerAudioMixSettings);
474
475                mVePlayer->setAudioMixPCMFileHandle(
476                 mAudioMixPCMFileHandle);
477
478                mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
479                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
480                 mCurrentMediaVolumeValue);
481
482                // We've already started the MediaSource in order to enable
483                // the prefetcher to read its data.
484                status_t err = mVePlayer->start(
485                        true /* sourceAlreadyStarted */);
486
487                if (err != OK) {
488                    delete mAudioPlayer;
489                    mAudioPlayer = NULL;
490
491                    mFlags &= ~(PLAYING | FIRST_FRAME);
492                    return err;
493                }
494
495                mTimeSource = mVePlayer;
496
497                deferredAudioSeek = true;
498                mWatchForAudioSeekComplete = false;
499                mWatchForAudioEOS = true;
500            }
501        } else {
502            ((VideoEditorAudioPlayer *)mAudioPlayer)->resume();
503        }
504
505    }
506
507    if (mTimeSource == NULL && mAudioPlayer == NULL) {
508        mTimeSource = &mSystemTimeSource;
509    }
510
511    if (mVideoSource != NULL) {
512        // Kick off video playback
513        postVideoEvent_l();
514    }
515
516    if (deferredAudioSeek) {
517        // If there was a seek request while we were paused
518        // and we're just starting up again, honor the request now.
519        seekAudioIfNecessary_l();
520    }
521
522    if (mFlags & AT_EOS) {
523        // Legacy behaviour, if a stream finishes playing and then
524        // is started again, we play from the start...
525        seekTo_l(0);
526    }
527
528    return OK;
529}
530
531
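// Creates the local preview renderer (if not already created) using the
// decoder's color format and the configured output video dimensions.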
532void PreviewPlayer::initRenderer_l() {
533    if (mSurface != NULL || mISurface != NULL) {
534        sp<MetaData> meta = mVideoSource->getFormat();
535
536        int32_t format;
537        const char *component;
538        int32_t decodedWidth, decodedHeight;
539        CHECK(meta->findInt32(kKeyColorFormat, &format));
540        CHECK(meta->findCString(kKeyDecoderComponent, &component));
541        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
542        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
543
544        // Must ensure that mVideoRenderer's destructor is actually executed
545        // before creating a new one.
546        IPCThreadState::self()->flushCommands();
547
548        // Always use the local renderer, since the decoded buffers are
549        // modified by the post-processing module. Decoders are
550        // instantiated locally and as a consequence allocate their
551        // buffers in the local address space.
552        if(mVideoRenderer == NULL) {
553
554            mVideoRenderer = new PreviewLocalRenderer(
555                false,  // previewOnly
556                (OMX_COLOR_FORMATTYPE)format,
557                mSurface,
558                mOutputVideoWidth, mOutputVideoHeight,
559                mOutputVideoWidth, mOutputVideoHeight);
560        }
561    }
562}
563
564
565void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
566    Mutex::Autolock autoLock(mLock);
567    mISurface = isurface;
568}
569
570
571status_t PreviewPlayer::seekTo(int64_t timeUs) {
572
573    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
574        Mutex::Autolock autoLock(mLock);
575        return seekTo_l(timeUs);
576    }
577
578    return OK;
579}
580
581
582status_t PreviewPlayer::getVideoDimensions(
583        int32_t *width, int32_t *height) const {
584    Mutex::Autolock autoLock(mLock);
585
586    if (mVideoWidth < 0 || mVideoHeight < 0) {
587        return UNKNOWN_ERROR;
588    }
589
590    *width = mVideoWidth;
591    *height = mVideoHeight;
592
593    return OK;
594}
595
596
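// Raw PCM tracks are used directly; anything else is decoded through
// OMXCodec and wrapped in a VideoEditorSRC for sample-rate conversion.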
597status_t PreviewPlayer::initAudioDecoder() {
598    sp<MetaData> meta = mAudioTrack->getFormat();
599    const char *mime;
600    CHECK(meta->findCString(kKeyMIMEType, &mime));
601
602    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
603        mAudioSource = mAudioTrack;
604    } else {
605        sp<MediaSource> aRawSource;
606        aRawSource = OMXCodec::Create(
607                mClient.interface(), mAudioTrack->getFormat(),
608                false, // createEncoder
609                mAudioTrack);
610
611        if(aRawSource != NULL) {
612            LOGV("initAudioDecoder: new VideoEditorSRC");
613            mAudioSource = new VideoEditorSRC(aRawSource);
614        }
615    }
616
617    if (mAudioSource != NULL) {
618        int64_t durationUs;
619        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
620            Mutex::Autolock autoLock(mMiscStateLock);
621            if (mDurationUs < 0 || durationUs > mDurationUs) {
622                mDurationUs = durationUs;
623            }
624        }
625        status_t err = mAudioSource->start();
626
627        if (err != OK) {
628            mAudioSource.clear();
629            return err;
630        }
631    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
632        // For legacy reasons we're simply going to ignore the absence
633        // of an audio decoder for QCELP instead of aborting playback
634        // altogether.
635        return OK;
636    }
637
638    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
639}
640
641
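// Instantiates the video decoder via OMXCodec and updates the clip
// duration and the reported width/height from the track format.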
642status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
643
644    mVideoSource = OMXCodec::Create(
645            mClient.interface(), mVideoTrack->getFormat(),
646            false,
647            mVideoTrack,
648            NULL, flags);
649
650    if (mVideoSource != NULL) {
651        int64_t durationUs;
652        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
653            Mutex::Autolock autoLock(mMiscStateLock);
654            if (mDurationUs < 0 || durationUs > mDurationUs) {
655                mDurationUs = durationUs;
656            }
657        }
658
659        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
660        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
661
662        mReportedWidth = mVideoWidth;
663        mReportedHeight = mVideoHeight;
664
665        status_t err = mVideoSource->start();
666
667        if (err != OK) {
668            mVideoSource.clear();
669            return err;
670        }
671    }
672
673    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
674}
675
676
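// Per-frame video event handler. It reads the next decoded buffer (dropping
// frames that fall before the begin cut time), performs A/V sync against the
// selected time source, applies any storyboard effects or media rendering,
// renders the frame, and either reschedules itself or posts EOS when the
// end cut time is reached.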
677void PreviewPlayer::onVideoEvent() {
678    uint32_t i=0;
679    bool bAppliedVideoEffect = false;
680    M4OSA_ERR err1 = M4NO_ERROR;
681    int64_t imageFrameTimeUs = 0;
682
683    Mutex::Autolock autoLock(mLock);
684    if (!mVideoEventPending) {
685        // The event has been cancelled in reset_l() but had already
686        // been scheduled for execution at that time.
687        return;
688    }
689    mVideoEventPending = false;
690
691    TimeSource *ts_st =  &mSystemTimeSource;
692    int64_t timeStartUs = ts_st->getRealTimeUs();
693
694    if (mSeeking) {
695        if (mLastVideoBuffer) {
696            mLastVideoBuffer->release();
697            mLastVideoBuffer = NULL;
698        }
699
700
701        if(mAudioSource != NULL) {
702
703            // We're going to seek the video source first, followed by
704            // the audio source.
705            // In order to avoid jumps in the DataSource offset caused by
706            // the audio codec prefetching data from the old locations
707            // while the video codec is already reading data from the new
708            // locations, we'll "pause" the audio source, causing it to
709            // stop reading input data until a subsequent seek.
710
711            if (mAudioPlayer != NULL) {
712                mAudioPlayer->pause();
713            }
714            mAudioSource->pause();
715        }
716    }
717
718    if (!mVideoBuffer) {
719        MediaSource::ReadOptions options;
720        if (mSeeking) {
721            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
722                                                      mSeekTimeUs / 1E6);
723
724            options.setSeekTo(
725                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
726        }
727        for (;;) {
728            status_t err = mVideoSource->read(&mVideoBuffer, &options);
729            options.clearSeekTo();
730
731            if (err != OK) {
732                CHECK_EQ(mVideoBuffer, NULL);
733
734                if (err == INFO_FORMAT_CHANGED) {
735                    LOGV("LV PLAYER VideoSource signalled format change");
736                    notifyVideoSize_l();
737                    sp<MetaData> meta = mVideoSource->getFormat();
738
739                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
740                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
741                    if (mVideoRenderer != NULL) {
742                        mVideoRendererIsPreview = false;
743                        initRenderer_l();
744                    }
745                    continue;
746                }
747                // So video playback is complete, but we may still have
748                // a seek request pending that needs to be applied to the audio track
749                if (mSeeking) {
750                    LOGV("video stream ended while seeking!");
751                }
752                finishSeekIfNecessary(-1);
753                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
754                mFlags |= VIDEO_AT_EOS;
755                if (mOverlayUpdateEventPosted) {
756                    mOverlayUpdateEventPosted = false;
757                    postOverlayUpdateEvent_l();
758                }
759                postStreamDoneEvent_l(err);
760                return;
761            }
762
763            if (mVideoBuffer->range_length() == 0) {
764                // Some decoders, notably the PV AVC software decoder
765                // return spurious empty buffers that we just want to ignore.
766
767                mVideoBuffer->release();
768                mVideoBuffer = NULL;
769                continue;
770            }
771
772            int64_t videoTimeUs;
773            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
774
775            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
776                // Frames are before the begin cut time;
777                // do not render them.
778                mVideoBuffer->release();
779                mVideoBuffer = NULL;
780                continue;
781            }
782
783            break;
784        }
785    }
786
787    mNumberDecVideoFrames++;
788
789    int64_t timeUs;
790    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
791
792    {
793        Mutex::Autolock autoLock(mMiscStateLock);
794        mVideoTimeUs = timeUs;
795    }
796
797    mDecodedVideoTs = timeUs;
798
799    if(!mStartNextPlayer) {
800        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
801        if(playbackTimeRemaining <= 1500000) {
802            // Less than 1.5 seconds of playback left: notify the
803            // listener so that the next player can be started.
804
805            mStartNextPlayer = true;
806            notifyListener_l(0xAAAAAAAA);
807        }
808    }
809
810    bool wasSeeking = mSeeking;
811    finishSeekIfNecessary(timeUs);
812
813    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
814
815    if(ts == NULL) {
816        mVideoBuffer->release();
817        mVideoBuffer = NULL;
818        return;
819    }
820
821    if(!mIsVideoSourceJpg) {
822        if (mFlags & FIRST_FRAME) {
823            mFlags &= ~FIRST_FRAME;
824
825            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
826        }
827
828        int64_t realTimeUs, mediaTimeUs;
829        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
830            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
831            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
832        }
833
834        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
835
836        int64_t latenessUs = nowUs - timeUs;
837
838        if (wasSeeking) {
839            // Let's display the first frame after seeking right away.
840            latenessUs = 0;
841        }
842        LOGV("Audio time stamp = %lld and video time stamp = %lld",
843                                            ts->getRealTimeUs(),timeUs);
844        if (latenessUs > 40000) {
845            // We're more than 40ms late.
846
847            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
848                                           latenessUs, latenessUs / 1E6);
849
850            mVideoBuffer->release();
851            mVideoBuffer = NULL;
852
853            postVideoEvent_l();
854            return;
855        }
856
857        if (latenessUs < -10000) {
858            // We're more than 10ms early.
859            LOGV("We're more than 10ms early, lateness %lld", latenessUs);
860
861            postVideoEvent_l(10000);
862            return;
863        }
864    }
865
866    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
867        mVideoRendererIsPreview = false;
868
869        initRenderer_l();
870    }
871
872    // If the timestamp exceeds the clip's end cut time, do not render
873    if((timeUs/1000) > mPlayEndTimeMsec) {
874        if (mLastVideoBuffer) {
875            mLastVideoBuffer->release();
876            mLastVideoBuffer = NULL;
877        }
878        mLastVideoBuffer = mVideoBuffer;
879        mVideoBuffer = NULL;
880        mFlags |= VIDEO_AT_EOS;
881        mFlags |= AUDIO_AT_EOS;
882        LOGI("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
883        if (mOverlayUpdateEventPosted) {
884            mOverlayUpdateEventPosted = false;
885            postOverlayUpdateEvent_l();
886        }
887        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
888        return;
889    }
890
891    // Post processing to apply video effects
892    for(i=0;i<mNumberEffects;i++) {
893        // First check if effect starttime matches the clip being previewed
894        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
895        (mEffectsSettings[i].uiStartTime >=
896         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
897        {
898            // This effect doesn't belong to this clip, check next one
899            continue;
900        }
901        // Check if effect applies to this particular frame timestamp
902        if((mEffectsSettings[i].uiStartTime <=
903         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
904            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
905             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
906              && (mEffectsSettings[i].uiDuration != 0)) {
907
908            setVideoPostProcessingNode(
909             mEffectsSettings[i].VideoEffectType, TRUE);
910        }
911        else {
912            setVideoPostProcessingNode(
913             mEffectsSettings[i].VideoEffectType, FALSE);
914        }
915    }
916
917    //Provide the overlay Update indication when there is an overlay effect
918    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
919        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
920        if (!mOverlayUpdateEventPosted) {
921
922            // Find the effect in effectSettings array
923            int index;
924            for (index = 0; index < mNumberEffects; index++) {
925                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
926                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
927                if(mEffectsSettings[index].VideoEffectType ==
928                    M4xVSS_kVideoEffectType_Framing) {
929                    if (((mEffectsSettings[index].uiStartTime + 1) <=
930                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
931                        ((mEffectsSettings[index].uiStartTime - 1 +
932                        mEffectsSettings[index].uiDuration) >=
933                        timeMs + timeOffset - mPlayBeginTimeMsec))
934                    {
935                        break;
936                    }
937                }
938            }
939            if (index < mNumberEffects) {
940                mCurrFramingEffectIndex = index;
941                mOverlayUpdateEventPosted = true;
942                postOverlayUpdateEvent_l();
943                LOGV("Framing index = %d", mCurrFramingEffectIndex);
944            } else {
945                LOGV("No framing effects found");
946            }
947        }
948
949    } else if (mOverlayUpdateEventPosted) {
950        // Post the event when the overlay is no longer valid
951        LOGV("Overlay is Done");
952        mOverlayUpdateEventPosted = false;
953        postOverlayUpdateEvent_l();
954    }
955
956
957    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
958        err1 = doVideoPostProcessing();
959        if(err1 != M4NO_ERROR) {
960            LOGE("doVideoPostProcessing returned err");
961            bAppliedVideoEffect = false;
962        }
963        else {
964            bAppliedVideoEffect = true;
965        }
966    }
967    else {
968        bAppliedVideoEffect = false;
969        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
970            // No effects to be applied, but media rendering to be done
971            err1 = doMediaRendering();
972            if(err1 != M4NO_ERROR) {
973                LOGE("doMediaRendering returned err");
974                //Use original mVideoBuffer for rendering
975                mVideoResizedOrCropped = false;
976            }
977        }
978    }
979
980    if (mVideoRenderer != NULL) {
981        LOGV("mVideoRenderer CALL render()");
982        mVideoRenderer->render();
983    }
984
985    if (mLastVideoBuffer) {
986        mLastVideoBuffer->release();
987        mLastVideoBuffer = NULL;
988    }
989
990    mLastVideoBuffer = mVideoBuffer;
991    mVideoBuffer = NULL;
992
993    // Post progress callback based on callback interval set
994    if(mNumberDecVideoFrames >= mProgressCbInterval) {
995        postProgressCallbackEvent_l();
996        mNumberDecVideoFrames = 0;  // reset counter
997    }
998
999    // If the clip's end cut time has been reached, post an EOS event
1000    if((timeUs/1000) >= mPlayEndTimeMsec) {
1001        LOGV("PreviewPlayer: onVideoEvent EOS.");
1002        mFlags |= VIDEO_AT_EOS;
1003        mFlags |= AUDIO_AT_EOS;
1004        if (mOverlayUpdateEventPosted) {
1005            mOverlayUpdateEventPosted = false;
1006            postOverlayUpdateEvent_l();
1007        }
1008        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
1009    }
1010    else {
1011        if(!mIsVideoSourceJpg) {
1012            postVideoEvent_l();
1013        }
1014        else {
1015            postVideoEvent_l(33000);
1016        }
1017    }
1018}
1019
1020status_t PreviewPlayer::prepare() {
1021    Mutex::Autolock autoLock(mLock);
1022    return prepare_l();
1023}
1024
1025status_t PreviewPlayer::prepare_l() {
1026    if (mFlags & PREPARED) {
1027        return OK;
1028    }
1029
1030    if (mFlags & PREPARING) {
1031        return UNKNOWN_ERROR;
1032    }
1033
1034    mIsAsyncPrepare = false;
1035    status_t err = prepareAsync_l();
1036
1037    if (err != OK) {
1038        return err;
1039    }
1040
1041    while (mFlags & PREPARING) {
1042        mPreparedCondition.wait(mLock);
1043    }
1044
1045    return mPrepareResult;
1046}
1047
1048status_t PreviewPlayer::prepareAsync_l() {
1049    if (mFlags & PREPARING) {
1050        return UNKNOWN_ERROR;  // async prepare already pending
1051    }
1052
1053    if (!mQueueStarted) {
1054        mQueue.start();
1055        mQueueStarted = true;
1056    }
1057
1058    mFlags |= PREPARING;
1059    mAsyncPrepareEvent = new PreviewPlayerEvent(
1060            this, &PreviewPlayer::onPrepareAsyncEvent);
1061
1062    mQueue.postEvent(mAsyncPrepareEvent);
1063
1064    return OK;
1065}
1066
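// Deferred data-source setup (run from the async prepare event): creates
// the DataSource from the URI and, unless the clip is a raw .rgb file,
// an MPEG-4 extractor. If no extractor can be created, the clip is treated
// as a still image.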
1067status_t PreviewPlayer::finishSetDataSource_l() {
1068    sp<DataSource> dataSource;
1069    sp<MediaExtractor> extractor;
1070
1071    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
1072
1073    if (dataSource == NULL) {
1074        return UNKNOWN_ERROR;
1075    }
1076
1077    // If the file type is .rgb, there is no need to create an extractor.
1078    int uriLen = strlen(mUri);
1079    int startOffset = uriLen - 4;
1080    if (uriLen >= 4 && !strncasecmp(mUri+startOffset, ".rgb", 4)) {
1081        extractor = NULL;
1082    }
1083    else {
1084        extractor = MediaExtractor::Create(dataSource,
1085                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
1086    }
1087
1088    if (extractor == NULL) {
1089        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
1090        return setDataSource_l_jpg();
1091    }
1092
1093    return setDataSource_l(extractor);
1094}
1095
1096
1097// static
1098bool PreviewPlayer::ContinuePreparation(void *cookie) {
1099    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
1100
1101    return (me->mFlags & PREPARE_CANCELLED) == 0;
1102}
1103
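// Async prepare handler: finishes data-source setup, then instantiates the
// video decoder (hardware codecs only) and the audio decoder before
// signalling that preparation is complete.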
1104void PreviewPlayer::onPrepareAsyncEvent() {
1105    Mutex::Autolock autoLock(mLock);
1106    LOGV("onPrepareAsyncEvent");
1107
1108    if (mFlags & PREPARE_CANCELLED) {
1109        LOGI("LV PLAYER prepare was cancelled before doing anything");
1110        abortPrepare(UNKNOWN_ERROR);
1111        return;
1112    }
1113
1114    if (mUri.size() > 0) {
1115        status_t err = finishSetDataSource_l();
1116
1117        if (err != OK) {
1118            abortPrepare(err);
1119            return;
1120        }
1121    }
1122
1123    if (mVideoTrack != NULL && mVideoSource == NULL) {
1124        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
1125
1126        if (err != OK) {
1127            abortPrepare(err);
1128            return;
1129        }
1130    }
1131
1132    if (mAudioTrack != NULL && mAudioSource == NULL) {
1133        status_t err = initAudioDecoder();
1134
1135        if (err != OK) {
1136            abortPrepare(err);
1137            return;
1138        }
1139    }
1140    finishAsyncPrepare_l();
1141
1142}
1143
1144void PreviewPlayer::finishAsyncPrepare_l() {
1145    if (mIsAsyncPrepare) {
1146        if (mVideoSource == NULL) {
1147            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
1148            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
1149        } else {
1150            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
1151            notifyVideoSize_l();
1152        }
1153        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
1154        notifyListener_l(MEDIA_PREPARED);
1155    }
1156
1157    mPrepareResult = OK;
1158    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
1159    mFlags |= PREPARED;
1160    mAsyncPrepareEvent = NULL;
1161    mPreparedCondition.broadcast();
1162}
1163
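// Captures the current playback state (URI, headers, flags, position and,
// when accessible, a copy of the last decoded frame) into a SuspensionState
// snapshot, then resets the player. resume() uses the snapshot to restore
// playback and redisplay the saved frame.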
1164status_t PreviewPlayer::suspend() {
1165    LOGV("suspend");
1166    Mutex::Autolock autoLock(mLock);
1167
1168    if (mSuspensionState != NULL) {
1169        if (mLastVideoBuffer == NULL) {
1170            // We get here if the player is suspended again after a
1171            // resume() without any playback having taken place in
1172            // between.
1173            SuspensionState *state = mSuspensionState;
1174            mSuspensionState = NULL;
1175            reset_l();
1176            mSuspensionState = state;
1177            return OK;
1178        }
1179
1180        delete mSuspensionState;
1181        mSuspensionState = NULL;
1182    }
1183
1184    if (mFlags & PREPARING) {
1185        mFlags |= PREPARE_CANCELLED;
1186    }
1187
1188    while (mFlags & PREPARING) {
1189        mPreparedCondition.wait(mLock);
1190    }
1191
1192    SuspensionState *state = new SuspensionState;
1193    state->mUri = mUri;
1194    state->mUriHeaders = mUriHeaders;
1195    state->mFileSource = mFileSource;
1196
1197    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
1198    getPosition(&state->mPositionUs);
1199
1200    if (mLastVideoBuffer) {
1201        size_t size = mLastVideoBuffer->range_length();
1202        if (size) {
1203            int32_t unreadable;
1204            if (!mLastVideoBuffer->meta_data()->findInt32(
1205                        kKeyIsUnreadable, &unreadable)
1206                    || unreadable == 0) {
1207                state->mLastVideoFrameSize = size;
1208                state->mLastVideoFrame = malloc(size);
1209                memcpy(state->mLastVideoFrame,
1210                   (const uint8_t *)mLastVideoBuffer->data()
1211                        + mLastVideoBuffer->range_offset(),
1212                   size);
1213
1214                state->mVideoWidth = mVideoWidth;
1215                state->mVideoHeight = mVideoHeight;
1216
1217                sp<MetaData> meta = mVideoSource->getFormat();
1218                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
1219                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
1220                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
1221            } else {
1222                LOGV("Unable to save last video frame, we have no access to "
1223                     "the decoded video data.");
1224            }
1225        }
1226    }
1227
1228    reset_l();
1229
1230    mSuspensionState = state;
1231
1232    return OK;
1233}
1234
1235status_t PreviewPlayer::resume() {
1236    LOGV("resume");
1237    Mutex::Autolock autoLock(mLock);
1238
1239    if (mSuspensionState == NULL) {
1240        return INVALID_OPERATION;
1241    }
1242
1243    SuspensionState *state = mSuspensionState;
1244    mSuspensionState = NULL;
1245
1246    status_t err;
1247    if (state->mFileSource != NULL) {
1248        err = AwesomePlayer::setDataSource_l(state->mFileSource);
1249
1250        if (err == OK) {
1251            mFileSource = state->mFileSource;
1252        }
1253    } else {
1254        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
1255    }
1256
1257    if (err != OK) {
1258        delete state;
1259        state = NULL;
1260
1261        return err;
1262    }
1263
1264    seekTo_l(state->mPositionUs);
1265
1266    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
1267
1268    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
1269        mVideoRenderer =
1270            new PreviewLocalRenderer(
1271                    true,  // previewOnly
1272                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
1273                    mSurface,
1274                    state->mVideoWidth,
1275                    state->mVideoHeight,
1276                    state->mDecodedWidth,
1277                    state->mDecodedHeight);
1278
1279        mVideoRendererIsPreview = true;
1280
1281        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
1282                state->mLastVideoFrame, state->mLastVideoFrameSize);
1283    }
1284
1285    if (state->mFlags & PLAYING) {
1286        play_l();
1287    }
1288
1289    mSuspensionState = state;
1290    state = NULL;
1291
1292    return OK;
1293}
1294
1295
1296status_t PreviewPlayer::loadEffectsSettings(
1297                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
1298    M4OSA_UInt32 i = 0, rgbSize = 0;
1299    M4VIFI_UInt8 *tmp = M4OSA_NULL;
1300
1301    mNumberEffects = nEffects;
1302    mEffectsSettings = pEffectSettings;
1303    return OK;
1304}
1305
1306status_t PreviewPlayer::loadAudioMixSettings(
1307                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
1308
1309    LOGV("PreviewPlayer: loadAudioMixSettings: ");
1310    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
1311    return OK;
1312}
1313
1314status_t PreviewPlayer::setAudioMixPCMFileHandle(
1315                    M4OSA_Context pAudioMixPCMFileHandle) {
1316
1317    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
1318    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
1319    return OK;
1320}
1321
1322status_t PreviewPlayer::setAudioMixStoryBoardParam(
1323                    M4OSA_UInt32 audioMixStoryBoardTS,
1324                    M4OSA_UInt32 currentMediaBeginCutTime,
1325                    M4OSA_UInt32 primaryTrackVolValue ) {
1326
1327    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
1328    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
1329    mCurrentMediaVolumeValue = primaryTrackVolValue;
1330    return OK;
1331}
1332
1333status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
1334
1335    mPlayBeginTimeMsec = msec;
1336    return OK;
1337}
1338
1339status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
1340
1341    mPlayEndTimeMsec = msec;
1342    return OK;
1343}
1344
1345status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
1346
1347    mStoryboardStartTimeMsec = msec;
1348    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1349    return OK;
1350}
1351
1352status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
1353
1354    mProgressCbInterval = cbInterval;
1355    return OK;
1356}
1357
1358
1359status_t PreviewPlayer::setMediaRenderingMode(
1360        M4xVSS_MediaRendering mode,
1361        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
1362
1363    mRenderingMode = mode;
1364
1365    /* reset boolean for each clip*/
1366    mVideoResizedOrCropped = false;
1367
1368    switch(outputVideoSize) {
1369        case M4VIDEOEDITING_kSQCIF:
1370            mOutputVideoWidth = 128;
1371            mOutputVideoHeight = 96;
1372            break;
1373
1374        case M4VIDEOEDITING_kQQVGA:
1375            mOutputVideoWidth = 160;
1376            mOutputVideoHeight = 120;
1377            break;
1378
1379        case M4VIDEOEDITING_kQCIF:
1380            mOutputVideoWidth = 176;
1381            mOutputVideoHeight = 144;
1382            break;
1383
1384        case M4VIDEOEDITING_kQVGA:
1385            mOutputVideoWidth = 320;
1386            mOutputVideoHeight = 240;
1387            break;
1388
1389        case M4VIDEOEDITING_kCIF:
1390            mOutputVideoWidth = 352;
1391            mOutputVideoHeight = 288;
1392            break;
1393
1394        case M4VIDEOEDITING_kVGA:
1395            mOutputVideoWidth = 640;
1396            mOutputVideoHeight = 480;
1397            break;
1398
1399        case M4VIDEOEDITING_kWVGA:
1400            mOutputVideoWidth = 800;
1401            mOutputVideoHeight = 480;
1402            break;
1403
1404        case M4VIDEOEDITING_kNTSC:
1405            mOutputVideoWidth = 720;
1406            mOutputVideoHeight = 480;
1407            break;
1408
1409        case M4VIDEOEDITING_k640_360:
1410            mOutputVideoWidth = 640;
1411            mOutputVideoHeight = 360;
1412            break;
1413
1414        case M4VIDEOEDITING_k854_480:
1415            mOutputVideoWidth = 854;
1416            mOutputVideoHeight = 480;
1417            break;
1418
1419        case M4VIDEOEDITING_kHD1280:
1420            mOutputVideoWidth = 1280;
1421            mOutputVideoHeight = 720;
1422            break;
1423
1424        case M4VIDEOEDITING_kHD1080:
1425            mOutputVideoWidth = 1080;
1426            mOutputVideoHeight = 720;
1427            break;
1428
1429        case M4VIDEOEDITING_kHD960:
1430            mOutputVideoWidth = 960;
1431            mOutputVideoHeight = 720;
1432            break;
1433
1434        default:
1435            LOGE("unsupported output video size set");
1436            return BAD_VALUE;
1437    }
1438
1439    return OK;
1440}
1441
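// Converts the decoded YUV420 planar frame into the renderer's YV12 output
// buffer while applying the selected rendering mode (e.g. resizing,
// cropping or black borders).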
1442M4OSA_ERR PreviewPlayer::doMediaRendering() {
1443    M4OSA_ERR err = M4NO_ERROR;
1444    M4VIFI_ImagePlane planeIn[3], planeOut[3];
1445    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
1446    M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
1447    size_t videoBufferSize = 0;
1448    M4OSA_UInt32 frameSize = 0, index = 0, bufferOffset = 0;
1449    int32_t colorFormat = 0;
1450
1451    if(!mIsVideoSourceJpg) {
1452        sp<MetaData> meta = mVideoSource->getFormat();
1453        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1454    }
1455    else {
1456        colorFormat = OMX_COLOR_FormatYUV420Planar;
1457    }
1458
1459    videoBufferSize = mVideoBuffer->size();
1460    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
1461
1462    uint8_t* outBuffer;
1463    size_t outBufferStride = 0;
1464
1465    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
1466
1467    bufferOffset = index*frameSize;
1468    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
1469                mVideoBuffer->range_offset()+bufferOffset;
1470
1471
1472    /* In plane*/
1473    prepareYUV420ImagePlane(planeIn, mVideoWidth,
1474      mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
1475
1476    // Set the output YUV420 plane to be compatible with YV12 format
1477    // W & H even
1478    // YVU instead of YUV
1479    // align buffers on 32 bits
1480
1481    //In YV12 format, sizes must be even
1482    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
1483    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
1484
1485    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
1486     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
1487
1488
1489    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
1490
1491    if(err != M4NO_ERROR)
1492    {
1493        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
1494        return err;
1495    }
1496    mVideoResizedOrCropped = true;
1497
1498    return err;
1499}
1500
1501status_t PreviewPlayer::resetJniCallbackTimeStamp() {
1502
1503    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1504    return OK;
1505}
1506
1507void PreviewPlayer::postProgressCallbackEvent_l() {
1508    if (mProgressCbEventPending) {
1509        return;
1510    }
1511    mProgressCbEventPending = true;
1512
1513    mQueue.postEvent(mProgressCbEvent);
1514}
1515
1516
1517void PreviewPlayer::onProgressCbEvent() {
1518    Mutex::Autolock autoLock(mLock);
1519    if (!mProgressCbEventPending) {
1520        return;
1521    }
1522    mProgressCbEventPending = false;
1523    // If playback starts from a previous I-frame (before the begin cut
1524    // time), report the storyboard start time rather than the frame time.
1525    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
1526        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
1527    }
1528    else {
1529        notifyListener_l(MEDIA_INFO, 0,
1530        (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
1531    }
1532}
1533
1534void PreviewPlayer::postOverlayUpdateEvent_l() {
1535    if (mOverlayUpdateEventPending) {
1536        return;
1537    }
1538    mOverlayUpdateEventPending = true;
1539    mQueue.postEvent(mOverlayUpdateEvent);
1540}
1541
1542void PreviewPlayer::onUpdateOverlayEvent() {
1543    Mutex::Autolock autoLock(mLock);
1544
1545    if (!mOverlayUpdateEventPending) {
1546        return;
1547    }
1548    mOverlayUpdateEventPending = false;
1549
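    // 0xBBBBBBBB is a private notification code, presumably handled by the
    // preview controller layer: updateState is 1 when a framing overlay
    // becomes active and 0 when the overlay is done.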
1550    int updateState;
1551    if (mOverlayUpdateEventPosted) {
1552        updateState = 1;
1553    } else {
1554        updateState = 0;
1555    }
1556    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
1557}
1558
1559
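// Maps an M4VSS3GPP/M4xVSS video effect type onto the player's internal
// effect bit mask and sets or clears the corresponding bit.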
1560void PreviewPlayer::setVideoPostProcessingNode(
1561                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
1562
1563    uint32_t effect = VIDEO_EFFECT_NONE;
1564
1565    //Map M4VSS3GPP_VideoEffectType to local enum
1566    switch(type) {
1567        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1568            effect = VIDEO_EFFECT_FADEFROMBLACK;
1569            break;
1570
1571        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1572            effect = VIDEO_EFFECT_FADETOBLACK;
1573            break;
1574
1575        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1576            effect = VIDEO_EFFECT_CURTAINOPEN;
1577            break;
1578
1579        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1580            effect = VIDEO_EFFECT_CURTAINCLOSE;
1581            break;
1582
1583        case M4xVSS_kVideoEffectType_BlackAndWhite:
1584            effect = VIDEO_EFFECT_BLACKANDWHITE;
1585            break;
1586
1587        case M4xVSS_kVideoEffectType_Pink:
1588            effect = VIDEO_EFFECT_PINK;
1589            break;
1590
1591        case M4xVSS_kVideoEffectType_Green:
1592            effect = VIDEO_EFFECT_GREEN;
1593            break;
1594
1595        case M4xVSS_kVideoEffectType_Sepia:
1596            effect = VIDEO_EFFECT_SEPIA;
1597            break;
1598
1599        case M4xVSS_kVideoEffectType_Negative:
1600            effect = VIDEO_EFFECT_NEGATIVE;
1601            break;
1602
1603        case M4xVSS_kVideoEffectType_Framing:
1604            effect = VIDEO_EFFECT_FRAMING;
1605            break;
1606
1607        case M4xVSS_kVideoEffectType_Fifties:
1608            effect = VIDEO_EFFECT_FIFTIES;
1609            break;
1610
1611        case M4xVSS_kVideoEffectType_ColorRGB16:
1612            effect = VIDEO_EFFECT_COLOR_RGB16;
1613            break;
1614
1615        case M4xVSS_kVideoEffectType_Gradient:
1616            effect = VIDEO_EFFECT_GRADIENT;
1617            break;
1618
1619        default:
1620            effect = VIDEO_EFFECT_NONE;
1621            break;
1622    }
1623
1624    if(enable == M4OSA_TRUE) {
1625        //If already set, then no need to set again
1626        if(!(mCurrentVideoEffect & effect)) {
1627            mCurrentVideoEffect |= effect;
1628            if(effect == VIDEO_EFFECT_FIFTIES) {
1629                mIsFiftiesEffectStarted = true;
1630            }
1631        }
1632    }
1633    else  {
1634        //Reset only if already set
1635        if(mCurrentVideoEffect & effect) {
1636            mCurrentVideoEffect &= ~effect;
1637        }
1638    }
1639}
1640
1641status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
1642    mVideoWidth = width;
1643    mVideoHeight = height;
1644    return OK;
1645}
1646
1647
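// Fills a vePostProcessParams descriptor (frame buffer, dimensions,
// timestamps, effect settings and overlay buffers) and delegates the actual
// effect application and rendering-mode conversion to
// applyEffectsAndRenderingMode().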
1648M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
1649    M4OSA_ERR err = M4NO_ERROR;
1650    vePostProcessParams postProcessParams;
1651    int32_t colorFormat = 0;
1652
1653
1654    if(!mIsVideoSourceJpg) {
1655        sp<MetaData> meta = mVideoSource->getFormat();
1656        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1657    }
1658    else {
1659        colorFormat = OMX_COLOR_FormatYUV420Planar;
1660    }
1661
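    // Semi-planar YUV is not handled by the post-processing code.
    // 0x7FA30C00 is assumed to be a vendor-specific (Qualcomm) YUV420
    // semi-planar format constant.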
1662    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
1663       (colorFormat == 0x7FA30C00)) {
1664          LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
1665          return M4ERR_UNSUPPORTED_MEDIA_TYPE;
1666    }
1667
1668    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
1669        + mVideoBuffer->range_offset();
1670
1671    postProcessParams.videoWidth = mVideoWidth;
1672    postProcessParams.videoHeight = mVideoHeight;
1673    postProcessParams.timeMs = mDecodedVideoTs/1000;
1674    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
1675    postProcessParams.effectsSettings = mEffectsSettings;
1676    postProcessParams.numberEffects = mNumberEffects;
1677    postProcessParams.outVideoWidth = mOutputVideoWidth;
1678    postProcessParams.outVideoHeight = mOutputVideoHeight;
1679    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
1680    postProcessParams.renderingMode = mRenderingMode;
1681    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
1682        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
1683        mIsFiftiesEffectStarted = M4OSA_FALSE;
1684    }
1685    else {
1686       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
1687    }
1688
1689    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
1690    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
1691    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
1692    err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
1693
1694    return err;
1695}
1696
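// Reads the first renderable video frame into mVideoBuffer, honouring any
// pending seek and skipping frames before the begin cut time, without
// rendering it.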
1697status_t PreviewPlayer::readFirstVideoFrame() {
1698    LOGV("PreviewPlayer::readFirstVideoFrame");
1699
1700    if (!mVideoBuffer) {
1701        MediaSource::ReadOptions options;
1702        if (mSeeking) {
1703            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
1704                    mSeekTimeUs / 1E6);
1705
1706            options.setSeekTo(
1707                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
1708        }
1709        for (;;) {
1710            status_t err = mVideoSource->read(&mVideoBuffer, &options);
1711            options.clearSeekTo();
1712
1713            if (err != OK) {
1714                CHECK_EQ(mVideoBuffer, NULL);
1715
1716                if (err == INFO_FORMAT_CHANGED) {
1717                    LOGV("LV PLAYER VideoSource signalled format change");
1718                    notifyVideoSize_l();
1719                    sp<MetaData> meta = mVideoSource->getFormat();
1720
1721                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
1722                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
1723
1724                    if (mVideoRenderer != NULL) {
1725                        mVideoRendererIsPreview = false;
1726                        initRenderer_l();
1727                    }
1728                    continue;
1729                }
1730                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
1731                mFlags |= VIDEO_AT_EOS;
1732                postStreamDoneEvent_l(err);
1733                return OK;
1734            }
1735
1736            if (mVideoBuffer->range_length() == 0) {
1737                // Some decoders, notably the PV AVC software decoder
1738                // return spurious empty buffers that we just want to ignore.
1739
1740                mVideoBuffer->release();
1741                mVideoBuffer = NULL;
1742                continue;
1743            }
1744
1745            int64_t videoTimeUs;
1746            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
1747
1748            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
1749                // buffers are before begin cut time
1750                // ignore them
1751                //LOGI("PreviewPlayer: Ignoring buffers before begin cut time");
1752                mVideoBuffer->release();
1753                mVideoBuffer = NULL;
1754                continue;
1755            }
1756
1757            break;
1758        }
1759    }
1760
1761    int64_t timeUs;
1762    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
1763
1764    {
1765        Mutex::Autolock autoLock(mMiscStateLock);
1766        mVideoTimeUs = timeUs;
1767    }
1768
1769    mDecodedVideoTs = timeUs;
1770
1771    return OK;
1772
1773}
1774
1775}  // namespace android
1776