PreviewPlayer.cpp revision 4f4efef8357f4d8b23ccfb1b29db34175bf72627
/*
 * Copyright (C) 2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#define LOG_NDEBUG 1
#define LOG_TAG "PreviewPlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "PreviewPlayer.h"
#include "DummyAudioSource.h"
#include "DummyVideoSource.h"
#include "VideoEditorSRC.h"
#include "include/LiveSession.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"


#include "PreviewRenderer.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>

namespace android {


struct PreviewPlayerEvent : public TimedEventQueue::Event {
    PreviewPlayerEvent(
            PreviewPlayer *player,
            void (PreviewPlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~PreviewPlayerEvent() {}

    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    PreviewPlayer *mPlayer;
    void (PreviewPlayer::*mMethod)();

    PreviewPlayerEvent(const PreviewPlayerEvent &);
    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};


struct PreviewLocalRenderer : public PreviewPlayerRenderer {

    static PreviewLocalRenderer* initPreviewLocalRenderer (
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
    {
        PreviewLocalRenderer* mLocalRenderer = new
            PreviewLocalRenderer(
                previewOnly,
                colorFormat,
                surface,
                displayWidth, displayHeight,
                decodedWidth, decodedHeight,
                rotationDegrees);

        if ( mLocalRenderer->init(previewOnly,
                 colorFormat, surface,
                 displayWidth, displayHeight,
                 decodedWidth, decodedHeight,
                 rotationDegrees) != OK )
        {
            delete mLocalRenderer;
            return NULL;
        }
        return mLocalRenderer;
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }
    void render() {
        mTarget->renderYV12();
    }
    void getBuffer(uint8_t **data, size_t *stride) {
        mTarget->getBufferYV12(data, stride);
    }

protected:
    virtual ~PreviewLocalRenderer() {
        delete mTarget;
        mTarget = NULL;
    }

private:
    PreviewRenderer *mTarget;

    PreviewLocalRenderer(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
        : mTarget(NULL) {
    }


    int init(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0);

    PreviewLocalRenderer(const PreviewLocalRenderer &);
    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
};

int PreviewLocalRenderer::init(
        bool previewOnly,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {

    mTarget = PreviewRenderer::CreatePreviewRenderer (
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);
    if (mTarget == M4OSA_NULL) {
        return UNKNOWN_ERROR;
    }
    return OK;
}

PreviewPlayer::PreviewPlayer()
    : AwesomePlayer(),
      mFrameRGBBuffer(NULL),
      mFrameYUVBuffer(NULL),
      mReportedWidth(0),
      mReportedHeight(0),
      mCurrFramingEffectIndex(0) {

    mVideoRenderer = NULL;
    mLastVideoBuffer = NULL;
    mSuspensionState = NULL;
    mEffectsSettings = NULL;
    mAudioMixStoryBoardTS = 0;
    mCurrentMediaBeginCutTime = 0;
    mCurrentMediaVolumeValue = 0;
    mNumberEffects = 0;
    mDecodedVideoTs = 0;
    mDecVideoTsStoryBoard = 0;
    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mProgressCbInterval = 0;
    mNumberDecVideoFrames = 0;
    mOverlayUpdateEventPosted = false;

    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new PreviewPlayerEvent(this,
         &AwesomePlayer::onStreamDone);

    mStreamDoneEventPending = false;

    mCheckAudioStatusEvent = new PreviewPlayerEvent(
        this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    mProgressCbEvent = new PreviewPlayerEvent(this,
         &PreviewPlayer::onProgressCbEvent);

    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
        &PreviewPlayer::onUpdateOverlayEvent);
    mProgressCbEventPending = false;

    mOverlayUpdateEventPending = false;
    mResizedVideoBuffer = NULL;
    mVideoResizedOrCropped = false;
    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
    mIsFiftiesEffectStarted = false;
    reset();
}

PreviewPlayer::~PreviewPlayer() {

    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    if(mResizedVideoBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
        mResizedVideoBuffer = NULL;
    }

    mVideoRenderer.clear();
    mVideoRenderer = NULL;
}

void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    mQueue.cancelEvent(mProgressCbEvent->eventID());
    mProgressCbEventPending = false;
}

status_t PreviewPlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t PreviewPlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.
    return OK;
}

status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
                         && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    /* Add support for dummy audio */
    if (!haveAudio) {
        LOGV("PreviewPlayer: setDataSource_l DummyAudioSource creation started");

        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                                              ((mPlayEndTimeMsec)*1000));
        LOGV("PreviewPlayer: setDataSource_l DummyAudioSource created");
        if(mAudioTrack != NULL) {
            haveAudio = true;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();
    return OK;
}

status_t PreviewPlayer::setDataSource_l_jpg() {
    LOGV("PreviewPlayer: setDataSource_l_jpg started");

    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
                                          ((mPlayEndTimeMsec)*1000));
    LOGV("PreviewPlayer: setDataSource_l_jpg DummyAudioSource created");
    if(mAudioSource != NULL) {
        setAudioSource(mAudioSource);
    }
    status_t error = mAudioSource->start();
    if (error != OK) {
        LOGV("Error starting dummy audio source");
        mAudioSource.clear();
        return error;
    }

    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;

    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
                                            mDurationUs, mUri);
    mReportedWidth = mVideoWidth;
    mReportedHeight = mVideoHeight;

    setVideoSource(mVideoSource);
    status_t err1 = mVideoSource->start();
    if (err1 != OK) {
        mVideoSource.clear();
        return err1;
    }

    mIsVideoSourceJpg = true;
    return OK;
}

void PreviewPlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

void PreviewPlayer::reset_l() {

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned.
    // If we did this later, audio would continue playing while we
    // shut down the video-related resources, and the player would not
    // appear as responsive to the reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    delete mAudioPlayer;
    mAudioPlayer = NULL;

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = false;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mIsVideoSourceJpg = false;
    mFrameRGBBuffer = NULL;
    if(mFrameYUVBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
        mFrameYUVBuffer = NULL;
    }
}

void PreviewPlayer::partial_reset_l() {

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    /* Call the base class implementation */
    AwesomePlayer::partial_reset_l();

}

status_t PreviewPlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

status_t PreviewPlayer::play_l() {
    // Typed alias for mAudioPlayer; initialized here so the resume() path
    // below never dereferences an uninitialized pointer, and reassigned
    // when a new audio player is created.
    VideoEditorAudioPlayer *mVePlayer =
            (VideoEditorAudioPlayer *)mAudioPlayer;
    if (mFlags & PLAYING) {
        return OK;
    }
    mStartNextPlayer = false;

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {

                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
                mVePlayer =
                          (VideoEditorAudioPlayer*)mAudioPlayer;

                mAudioPlayer->setSource(mAudioSource);

                mVePlayer->setAudioMixSettings(
                 mPreviewPlayerAudioMixSettings);

                mVePlayer->setAudioMixPCMFileHandle(
                 mAudioMixPCMFileHandle);

                mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                 mCurrentMediaVolumeValue);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mVePlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);
                    return err;
                }

                mTimeSource = mVePlayer; //mAudioPlayer;

                deferredAudioSeek = true;
                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mVePlayer->resume();
        }

    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    // Set the seek option for Image source files and read.
    // This resets the timestamping for image play
    if (mIsVideoSourceJpg) {
        MediaSource::ReadOptions options;
        MediaBuffer *aLocalBuffer;
        options.setSeekTo(mSeekTimeUs);
        mVideoSource->read(&aLocalBuffer, &options);
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}


status_t PreviewPlayer::initRenderer_l() {
    if (mSurface != NULL || mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        // Always use the local renderer since the decoded buffers are
        // modified by the postprocessing module; the decoders are
        // instantiated locally and therefore allocate their buffers in
        // local address space.
        if(mVideoRenderer == NULL) {

            mVideoRenderer = PreviewLocalRenderer:: initPreviewLocalRenderer (
                false,  // previewOnly
                (OMX_COLOR_FORMATTYPE)format,
                mSurface,
                mOutputVideoWidth, mOutputVideoHeight,
                mOutputVideoWidth, mOutputVideoHeight);

            if ( mVideoRenderer == NULL )
            {
                return UNKNOWN_ERROR;
            }
            return OK;
        }
    }
    return OK;
}


void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);
    mISurface = isurface;
}


status_t PreviewPlayer::seekTo(int64_t timeUs) {

    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}


status_t PreviewPlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}


status_t PreviewPlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        sp<MediaSource> aRawSource;
        aRawSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);

        if(aRawSource != NULL) {
            LOGV("initAudioDecoder: new VideoEditorSRC");
            mAudioSource = new VideoEditorSRC(aRawSource);
        }
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}


status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        mReportedWidth = mVideoWidth;
        mReportedHeight = mVideoHeight;

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}


void PreviewPlayer::onVideoEvent() {
    uint32_t i=0;
    bool bAppliedVideoEffect = false;
    M4OSA_ERR err1 = M4NO_ERROR;
    int64_t imageFrameTimeUs = 0;

    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mFlags & SEEK_PREVIEW) {
        mFlags &= ~SEEK_PREVIEW;
        return;
    }

    TimeSource *ts_st =  &mSystemTimeSource;
    int64_t timeStartUs = ts_st->getRealTimeUs();

    if (mSeeking) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }


        if(mAudioSource != NULL) {

            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                                                      mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        err = initRenderer_l();
                        if (err != OK) {
                            postStreamDoneEvent_l(err);
                        }
                    }
                    continue;
                }
                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied to the audio track
                if (mSeeking) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                mOverlayUpdateEventPosted = false;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // Frames are before the begin cut time;
                // do not render them.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    mNumberDecVideoFrames++;

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    if(!mStartNextPlayer) {
        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
        if(playbackTimeRemaining <= 1500000) {
            // When less than 1.5 sec of playback is left,
            // send a notification to start the next player.

            mStartNextPlayer = true;
            notifyListener_l(0xAAAAAAAA);
        }
    }

    bool wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if(ts == NULL) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        return;
    }

    if(!mIsVideoSourceJpg) {
        if (mFlags & FIRST_FRAME) {
            mFlags &= ~FIRST_FRAME;

            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
        }

        int64_t realTimeUs, mediaTimeUs;
        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
        }

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        if (wasSeeking) {
            // Let's display the first frame after seeking right away.
            latenessUs = 0;
        }
        LOGV("Audio time stamp = %lld and video time stamp = %lld",
                                            ts->getRealTimeUs(),timeUs);
        if (latenessUs > 40000) {
            // We're more than 40ms late.

            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                                           latenessUs, latenessUs / 1E6);

            mVideoBuffer->release();
            mVideoBuffer = NULL;
            postVideoEvent_l(0);
            return;
        }

        if (latenessUs < -25000) {
            // We're more than 25ms early.
            LOGV("We're more than 25ms early, lateness %lld", latenessUs);

            postVideoEvent_l(25000);
            return;
        }
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        status_t err = initRenderer_l();
        if (err != OK) {
            postStreamDoneEvent_l(err);
        }
    }

    // If the timestamp exceeds the endCutTime of the clip, do not render
    if((timeUs/1000) > mPlayEndTimeMsec) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }
        mLastVideoBuffer = mVideoBuffer;
        mVideoBuffer = NULL;
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
        mOverlayUpdateEventPosted = false;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
        return;
    }

    // Post processing to apply video effects
    for(i=0;i<mNumberEffects;i++) {
        // First check if the effect start time matches the clip being previewed
        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
        (mEffectsSettings[i].uiStartTime >=
         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
        {
            // This effect doesn't belong to this clip, check the next one
            continue;
        }
        // Check if the effect applies to this particular frame timestamp
        if((mEffectsSettings[i].uiStartTime <=
         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
              && (mEffectsSettings[i].uiDuration != 0)) {
            setVideoPostProcessingNode(
             mEffectsSettings[i].VideoEffectType, TRUE);
        }
        else {
            setVideoPostProcessingNode(
             mEffectsSettings[i].VideoEffectType, FALSE);
        }
    }
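
    // Illustrative arithmetic for the window check above (example numbers,
    // not from the original source): with a storyboard offset
    // (mDecVideoTsStoryBoard) of 4,000,000 us, a begin cut time
    // (mPlayBeginTimeMsec) of 1000 ms and a decoded frame at
    // timeUs = 2,000,000 us, the storyboard-relative time is
    // (2000 + 4000) - 1000 = 5000 ms. An effect with uiStartTime = 4500 and
    // uiDuration = 1000 therefore covers this frame, so its post-processing
    // node is enabled; otherwise it is disabled for this frame.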

    // Provide the overlay update indication when there is an overlay effect
    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
        if (!mOverlayUpdateEventPosted) {
            // Find the effect in the effectSettings array
            int index;
            for (index = 0; index < mNumberEffects; index++) {
                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
                if(mEffectsSettings[index].VideoEffectType ==
                    M4xVSS_kVideoEffectType_Framing) {
                    if (((mEffectsSettings[index].uiStartTime + 1) <=
                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
                        ((mEffectsSettings[index].uiStartTime - 1 +
                        mEffectsSettings[index].uiDuration) >=
                        timeMs + timeOffset - mPlayBeginTimeMsec))
                    {
                        break;
                    }
                }
            }
            if (index < mNumberEffects) {
                mCurrFramingEffectIndex = index;
                mOverlayUpdateEventPosted = true;
                postOverlayUpdateEvent_l();
                LOGV("Framing index = %d", mCurrFramingEffectIndex);
            } else {
                LOGV("No framing effects found");
            }
        }

    } else if (mOverlayUpdateEventPosted) {
        // Post the event when the overlay is no longer valid
        LOGV("Overlay is Done");
        mOverlayUpdateEventPosted = false;
        postOverlayUpdateEvent_l();
    }


    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
        err1 = doVideoPostProcessing();
        if(err1 != M4NO_ERROR) {
            LOGE("doVideoPostProcessing returned err");
            bAppliedVideoEffect = false;
        }
        else {
            bAppliedVideoEffect = true;
        }
    }
    else {
        bAppliedVideoEffect = false;
        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
            // No effects to be applied, but media rendering to be done
            err1 = doMediaRendering();
            if(err1 != M4NO_ERROR) {
                LOGE("doMediaRendering returned err");
                // Use the original mVideoBuffer for rendering
                mVideoResizedOrCropped = false;
            }
        }
    }

    if (mVideoRenderer != NULL) {
        LOGV("mVideoRenderer CALL render()");
        mVideoRenderer->render();
    }

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    // Post the progress callback based on the callback interval set
    if(mNumberDecVideoFrames >= mProgressCbInterval) {
        postProgressCallbackEvent_l();
        mNumberDecVideoFrames = 0;  // reset counter
    }

    // If the endCutTime of the clip has been reached, post an EOS event
    if((timeUs/1000) >= mPlayEndTimeMsec) {
        LOGV("PreviewPlayer: onVideoEvent EOS.");
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        mOverlayUpdateEventPosted = false;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
    }
    else {
        if(!mIsVideoSourceJpg) {
            postVideoEvent_l(0);
        }
        else {
            postVideoEvent_l(33000);
        }
    }
}

status_t PreviewPlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

status_t PreviewPlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

status_t PreviewPlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new PreviewPlayerEvent(
            this, &PreviewPlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

status_t PreviewPlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;
    sp<MediaExtractor> extractor;

    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    //If file type is .rgb, then no need to check for Extractor
    int uriLen = strlen(mUri);
    int startOffset = uriLen - 4;
    if(!strncasecmp(mUri+startOffset, ".rgb", 4)) {
        extractor = NULL;
    }
    else {
        extractor = MediaExtractor::Create(dataSource,
                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
    }

    if (extractor == NULL) {
        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
        return setDataSource_l_jpg();
    }

    return setDataSource_l(extractor);
}


// static
bool PreviewPlayer::ContinuePreparation(void *cookie) {
    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

void PreviewPlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);
    LOGV("onPrepareAsyncEvent");

    if (mFlags & PREPARE_CANCELLED) {
        LOGV("LV PLAYER prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }
    finishAsyncPrepare_l();

}

void PreviewPlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        if (mVideoSource == NULL) {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
        } else {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
            notifyVideoSize_l();
        }
        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

status_t PreviewPlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            // We get here if the player is suspended again after a
            // resume() without any playback happening in between.
            SuspensionState *state = mSuspensionState;
            mSuspensionState = NULL;
            reset_l();
            mSuspensionState = state;
            return OK;
        }

        delete mSuspensionState;
        mSuspensionState = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    SuspensionState *state = new SuspensionState;
    state->mUri = mUri;
    state->mUriHeaders = mUriHeaders;
    state->mFileSource = mFileSource;

    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
    getPosition(&state->mPositionUs);

    if (mLastVideoBuffer) {
        size_t size = mLastVideoBuffer->range_length();
        if (size) {
            int32_t unreadable;
            if (!mLastVideoBuffer->meta_data()->findInt32(
                        kKeyIsUnreadable, &unreadable)
                    || unreadable == 0) {
                state->mLastVideoFrameSize = size;
                state->mLastVideoFrame = malloc(size);
                memcpy(state->mLastVideoFrame,
                   (const uint8_t *)mLastVideoBuffer->data()
                        + mLastVideoBuffer->range_offset(),
                   size);

                state->mVideoWidth = mVideoWidth;
                state->mVideoHeight = mVideoHeight;

                sp<MetaData> meta = mVideoSource->getFormat();
                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
            } else {
                LOGV("Unable to save last video frame, we have no access to "
                     "the decoded video data.");
            }
        }
    }

    reset_l();

    mSuspensionState = state;

    return OK;
}

status_t PreviewPlayer::resume() {
    LOGV("resume");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState == NULL) {
        return INVALID_OPERATION;
    }

    SuspensionState *state = mSuspensionState;
    mSuspensionState = NULL;

    status_t err;
    if (state->mFileSource != NULL) {
        err = AwesomePlayer::setDataSource_l(state->mFileSource);

        if (err == OK) {
            mFileSource = state->mFileSource;
        }
    } else {
        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
    }

    if (err != OK) {
        delete state;
        state = NULL;

        return err;
    }

    seekTo_l(state->mPositionUs);

    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);

    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
        mVideoRenderer =
            PreviewLocalRenderer::initPreviewLocalRenderer(
                    true,  // previewOnly
                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
                    mSurface,
                    state->mVideoWidth,
                    state->mVideoHeight,
                    state->mDecodedWidth,
                    state->mDecodedHeight);

        mVideoRendererIsPreview = true;

        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
                state->mLastVideoFrame, state->mLastVideoFrameSize);
    }

    if (state->mFlags & PLAYING) {
        play_l();
    }

    mSuspensionState = state;
    state = NULL;

    return OK;
}
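
// Lifecycle sketch (illustrative summary, not part of the original file):
// suspend() snapshots the URI/headers, playback flags, current position and
// the last decoded video frame, then calls reset_l() to release the decoders;
// resume() rebuilds the data source, seeks back to the saved position and
// immediately re-renders the saved frame, so the preview surface is not left
// blank while the playback pipeline restarts.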


status_t PreviewPlayer::loadEffectsSettings(
                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {

    mNumberEffects = nEffects;
    mEffectsSettings = pEffectSettings;
    return OK;
}

status_t PreviewPlayer::loadAudioMixSettings(
                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {

    LOGV("PreviewPlayer: loadAudioMixSettings: ");
    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
    return OK;
}

status_t PreviewPlayer::setAudioMixPCMFileHandle(
                    M4OSA_Context pAudioMixPCMFileHandle) {

    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
    return OK;
}

status_t PreviewPlayer::setAudioMixStoryBoardParam(
                    M4OSA_UInt32 audioMixStoryBoardTS,
                    M4OSA_UInt32 currentMediaBeginCutTime,
                    M4OSA_UInt32 primaryTrackVolValue ) {

    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
    mCurrentMediaVolumeValue = primaryTrackVolValue;
    return OK;
}

status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {

    mPlayBeginTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {

    mPlayEndTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {

    mStoryboardStartTimeMsec = msec;
    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

    mProgressCbInterval = cbInterval;
    return OK;
}


status_t PreviewPlayer::setMediaRenderingMode(
        M4xVSS_MediaRendering mode,
        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

    mRenderingMode = mode;

    /* reset boolean for each clip*/
    mVideoResizedOrCropped = false;

    switch(outputVideoSize) {
        case M4VIDEOEDITING_kSQCIF:
            mOutputVideoWidth = 128;
            mOutputVideoHeight = 96;
            break;

        case M4VIDEOEDITING_kQQVGA:
            mOutputVideoWidth = 160;
            mOutputVideoHeight = 120;
            break;

        case M4VIDEOEDITING_kQCIF:
            mOutputVideoWidth = 176;
            mOutputVideoHeight = 144;
            break;

        case M4VIDEOEDITING_kQVGA:
            mOutputVideoWidth = 320;
            mOutputVideoHeight = 240;
            break;

        case M4VIDEOEDITING_kCIF:
            mOutputVideoWidth = 352;
            mOutputVideoHeight = 288;
            break;

        case M4VIDEOEDITING_kVGA:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kWVGA:
            mOutputVideoWidth = 800;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kNTSC:
            mOutputVideoWidth = 720;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_k640_360:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 360;
            break;

        case M4VIDEOEDITING_k854_480:
            mOutputVideoWidth = 854;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kHD1280:
            mOutputVideoWidth = 1280;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD1080:
            mOutputVideoWidth = 1080;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD960:
            mOutputVideoWidth = 960;
            mOutputVideoHeight = 720;
            break;

        default:
            LOGE("unsupported output video size set");
            return BAD_VALUE;
    }

    return OK;
}
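
// Usage sketch (hypothetical caller code, shown only as a comment and not
// taken from the original file): a preview controller typically configures
// the clip window and rendering mode before starting playback, e.g.
//
//     PreviewPlayer *player = new PreviewPlayer();
//     player->setMediaRenderingMode(M4xVSS_kBlackBorders, M4VIDEOEDITING_kVGA);
//     player->setPlaybackBeginTime(0);
//     player->setPlaybackEndTime(clipDurationMs);    // clipDurationMs: caller-supplied
//     player->setStoryboardStartTime(storyboardMs);  // storyboardMs: caller-supplied
//
// M4xVSS_kBlackBorders is one of the M4xVSS_MediaRendering values; the
// resizing and cropping modes are selected the same way.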

M4OSA_ERR PreviewPlayer::doMediaRendering() {
    M4OSA_ERR err = M4NO_ERROR;
    M4VIFI_ImagePlane planeIn[3], planeOut[3];
    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
    M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
    size_t videoBufferSize = 0;
    M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0;
    int32_t colorFormat = 0;

    if(!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    }
    else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    videoBufferSize = mVideoBuffer->size();
    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;

    uint8_t* outBuffer;
    size_t outBufferStride = 0;

    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);

    bufferOffset = index*frameSize;
    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
                mVideoBuffer->range_offset()+bufferOffset;


    /* In plane*/
    prepareYUV420ImagePlane(planeIn, mVideoWidth,
      mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);

    // Set the output YUV420 plane to be compatible with YV12 format
    // W & H even
    // YVU instead of YUV
    // align buffers on 32 bits

    //In YV12 format, sizes must be even
    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;

    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);


    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);

    if(err != M4NO_ERROR)
    {
        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
        return err;
    }
    mVideoResizedOrCropped = true;

    return err;
}

status_t PreviewPlayer::resetJniCallbackTimeStamp() {

    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

void PreviewPlayer::postProgressCallbackEvent_l() {
    if (mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = true;

    mQueue.postEvent(mProgressCbEvent);
}


void PreviewPlayer::onProgressCbEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = false;
    // If playback starts from previous I-frame,
    // then send frame storyboard duration
    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
    }
    else {
        notifyListener_l(MEDIA_INFO, 0,
        (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
    }
}
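
// Worked example for the progress value reported above (illustrative numbers,
// not from the original source): with mDecodedVideoTs = 2,500,000 us,
// mDecVideoTsStoryBoard = 4,000,000 us and mPlayBeginTimeMsec = 1000,
// playback has passed the begin cut time, so the listener receives
// ((2500 + 4000) - 1000) = 5500 ms as the storyboard position.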

void PreviewPlayer::postOverlayUpdateEvent_l() {
    if (mOverlayUpdateEventPending) {
        return;
    }
    mOverlayUpdateEventPending = true;
    mQueue.postEvent(mOverlayUpdateEvent);
}

void PreviewPlayer::onUpdateOverlayEvent() {
    Mutex::Autolock autoLock(mLock);

    if (!mOverlayUpdateEventPending) {
        return;
    }
    mOverlayUpdateEventPending = false;

    int updateState;
    if (mOverlayUpdateEventPosted) {
        updateState = 1;
    } else {
        updateState = 0;
    }
    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
}


void PreviewPlayer::setVideoPostProcessingNode(
                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {

    uint32_t effect = VIDEO_EFFECT_NONE;

    //Map M4VSS3GPP_VideoEffectType to local enum
    switch(type) {
        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
            effect = VIDEO_EFFECT_FADEFROMBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
            effect = VIDEO_EFFECT_FADETOBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
            effect = VIDEO_EFFECT_CURTAINOPEN;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
            effect = VIDEO_EFFECT_CURTAINCLOSE;
            break;

        case M4xVSS_kVideoEffectType_BlackAndWhite:
            effect = VIDEO_EFFECT_BLACKANDWHITE;
            break;

        case M4xVSS_kVideoEffectType_Pink:
            effect = VIDEO_EFFECT_PINK;
            break;

        case M4xVSS_kVideoEffectType_Green:
            effect = VIDEO_EFFECT_GREEN;
            break;

        case M4xVSS_kVideoEffectType_Sepia:
            effect = VIDEO_EFFECT_SEPIA;
            break;

        case M4xVSS_kVideoEffectType_Negative:
            effect = VIDEO_EFFECT_NEGATIVE;
            break;

        case M4xVSS_kVideoEffectType_Framing:
            effect = VIDEO_EFFECT_FRAMING;
            break;

        case M4xVSS_kVideoEffectType_Fifties:
            effect = VIDEO_EFFECT_FIFTIES;
            break;

        case M4xVSS_kVideoEffectType_ColorRGB16:
            effect = VIDEO_EFFECT_COLOR_RGB16;
            break;

        case M4xVSS_kVideoEffectType_Gradient:
            effect = VIDEO_EFFECT_GRADIENT;
            break;

        default:
            effect = VIDEO_EFFECT_NONE;
            break;
    }

    if(enable == M4OSA_TRUE) {
        //If already set, then no need to set again
        if(!(mCurrentVideoEffect & effect)) {
            mCurrentVideoEffect |= effect;
            if(effect == VIDEO_EFFECT_FIFTIES) {
                mIsFiftiesEffectStarted = true;
            }
        }
    }
    else  {
        //Reset only if already set
        if(mCurrentVideoEffect & effect) {
            mCurrentVideoEffect &= ~effect;
        }
    }
}

status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
    mVideoWidth = width;
    mVideoHeight = height;
    return OK;
}


M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
    M4OSA_ERR err = M4NO_ERROR;
    vePostProcessParams postProcessParams;
    int32_t colorFormat = 0;


    if(!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    }
    else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
       (colorFormat == 0x7FA30C00)) {
          LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
          return M4ERR_UNSUPPORTED_MEDIA_TYPE;
    }

    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
        + mVideoBuffer->range_offset();

    postProcessParams.videoWidth = mVideoWidth;
    postProcessParams.videoHeight = mVideoHeight;
    postProcessParams.timeMs = mDecodedVideoTs/1000;
    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
    postProcessParams.effectsSettings = mEffectsSettings;
    postProcessParams.numberEffects = mNumberEffects;
    postProcessParams.outVideoWidth = mOutputVideoWidth;
    postProcessParams.outVideoHeight = mOutputVideoHeight;
    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
    postProcessParams.renderingMode = mRenderingMode;
    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
        mIsFiftiesEffectStarted = M4OSA_FALSE;
    }
    else {
       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
    }

    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer),
            &(postProcessParams.outBufferStride));
    err = applyEffectsAndRenderingMode(&postProcessParams,
            mReportedWidth, mReportedHeight);

    return err;
}

status_t PreviewPlayer::readFirstVideoFrame() {
    LOGV("PreviewPlayer::readFirstVideoFrame");

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                    mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        err = initRenderer_l();
                        if (err != OK) {
                            postStreamDoneEvent_l(err);
                        }
                    }
                    continue;
                }
                LOGV("PreviewPlayer: readFirstVideoFrame EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return OK;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // Buffers are before the begin cut time;
                // ignore them.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    return OK;

}

}  // namespace android