PreviewPlayer.cpp revision 408e25b2508d97f7ae0b39acf46ab42b7c223c44
/*
 * Copyright (C) 2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#define LOG_NDEBUG 1
#define LOG_TAG "PreviewPlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "PreviewPlayer.h"
#include "DummyAudioSource.h"
#include "DummyVideoSource.h"
#include "VideoEditorSRC.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"


#include "PreviewRenderer.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>

namespace android {


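// Small TimedEventQueue event that simply invokes a PreviewPlayer member
// function when it fires. Video, stream-done, audio-status, progress and
// overlay events are all dispatched this way.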
struct PreviewPlayerEvent : public TimedEventQueue::Event {
    PreviewPlayerEvent(
            PreviewPlayer *player,
            void (PreviewPlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~PreviewPlayerEvent() {}

    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    PreviewPlayer *mPlayer;
    void (PreviewPlayer::*mMethod)();

    PreviewPlayerEvent(const PreviewPlayerEvent &);
    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};


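// Renderer used for preview playback. It wraps a PreviewRenderer target so
// that decoded (and post-processed) YV12 buffers can be pushed to the
// Surface locally rather than through a remote renderer.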
struct PreviewLocalRenderer : public PreviewPlayerRenderer {

    static PreviewLocalRenderer* initPreviewLocalRenderer (
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
    {
        PreviewLocalRenderer* localRenderer = new
            PreviewLocalRenderer(
                previewOnly,
                colorFormat,
                surface,
                displayWidth, displayHeight,
                decodedWidth, decodedHeight,
                rotationDegrees);

        if (localRenderer->init(previewOnly,
                 colorFormat, surface,
                 displayWidth, displayHeight,
                 decodedWidth, decodedHeight,
                 rotationDegrees) != OK) {
            delete localRenderer;
            return NULL;
        }
        return localRenderer;
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }
    void render() {
        mTarget->renderYV12();
    }
    void getBuffer(uint8_t **data, size_t *stride) {
        mTarget->getBufferYV12(data, stride);
    }

protected:
    virtual ~PreviewLocalRenderer() {
        delete mTarget;
        mTarget = NULL;
    }

private:
    PreviewRenderer *mTarget;

    PreviewLocalRenderer(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
        : mTarget(NULL) {
    }


    int init(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0);

    PreviewLocalRenderer(const PreviewLocalRenderer &);
    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
};

int PreviewLocalRenderer::init(
        bool previewOnly,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {

    mTarget = PreviewRenderer::CreatePreviewRenderer(
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);
    if (mTarget == M4OSA_NULL) {
        return UNKNOWN_ERROR;
    }
    return OK;
}

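// PreviewPlayer extends AwesomePlayer with storyboard awareness: per-clip
// begin/end cut times, primary track volume, video effects and overlay
// (framing) notifications used by the video editor preview engine.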
PreviewPlayer::PreviewPlayer()
    : AwesomePlayer(),
      mCurrFramingEffectIndex(0),
      mReportedWidth(0),
      mReportedHeight(0),
      mFrameRGBBuffer(NULL),
      mFrameYUVBuffer(NULL) {

    mVideoRenderer = NULL;
    mLastVideoBuffer = NULL;
    mSuspensionState = NULL;
    mEffectsSettings = NULL;
    mVeAudioPlayer = NULL;
    mAudioMixStoryBoardTS = 0;
    mCurrentMediaBeginCutTime = 0;
    mCurrentMediaVolumeValue = 0;
    mNumberEffects = 0;
    mDecodedVideoTs = 0;
    mDecVideoTsStoryBoard = 0;
    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mProgressCbInterval = 0;
    mNumberDecVideoFrames = 0;
    mOverlayUpdateEventPosted = false;
    mIsChangeSourceRequired = true;

    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new PreviewPlayerEvent(this,
         &PreviewPlayer::onStreamDone);

    mStreamDoneEventPending = false;

    mCheckAudioStatusEvent = new PreviewPlayerEvent(
        this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    mProgressCbEvent = new PreviewPlayerEvent(this,
         &PreviewPlayer::onProgressCbEvent);

    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
        &PreviewPlayer::onUpdateOverlayEvent);
    mProgressCbEventPending = false;

    mOverlayUpdateEventPending = false;
    mResizedVideoBuffer = NULL;
    mVideoResizedOrCropped = false;
    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
    mIsFiftiesEffectStarted = false;
    reset();
}

PreviewPlayer::~PreviewPlayer() {

    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    if (mResizedVideoBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
        mResizedVideoBuffer = NULL;
    }

    mVideoRenderer.clear();
    mVideoRenderer = NULL;
}

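// Cancel all pending player events (video, stream-done, audio-status and
// progress callback). Note: the keepBufferingGoing argument is currently
// unused here.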
void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    mQueue.cancelEvent(mProgressCbEvent->eventID());
    mProgressCbEventPending = false;
}

status_t PreviewPlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t PreviewPlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.
    return OK;
}

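// Pick the first video and first audio track exposed by the extractor. If
// the clip has no audio track, a DummyAudioSource is created so that the
// audio path (and therefore the playback clock) still exists.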
status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
                        && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    /* Add support for dummy audio */
    if (!haveAudio) {
        LOGV("PreviewPlayer: setDataSource_l dummy audio creation started");

        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                                              ((mPlayEndTimeMsec)*1000));
        LOGV("PreviewPlayer: setDataSource_l DummyAudioSource created");
        if (mAudioTrack != NULL) {
            haveAudio = true;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();
    return OK;
}

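// Data source setup for still-image (JPEG/RGB) clips: a DummyAudioSource
// provides the clock and a DummyVideoSource repeatedly delivers the decoded
// image for the duration of the clip.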
status_t PreviewPlayer::setDataSource_l_jpg() {
    LOGV("PreviewPlayer: setDataSource_l_jpg started");

    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
                                          ((mPlayEndTimeMsec)*1000));
    LOGV("PreviewPlayer: setDataSource_l_jpg DummyAudioSource created");
    if (mAudioSource != NULL) {
        setAudioSource(mAudioSource);
    }
    status_t error = mAudioSource->start();
    if (error != OK) {
        LOGV("Error starting dummy audio source");
        mAudioSource.clear();
        return error;
    }

    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;

    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
                                            mDurationUs, mUri);
    mReportedWidth = mVideoWidth;
    mReportedHeight = mVideoHeight;

    setVideoSource(mVideoSource);
    status_t err1 = mVideoSource->start();
    if (err1 != OK) {
        mVideoSource.clear();
        return err1;
    }

    mIsVideoSourceJpg = true;
    return OK;
}

void PreviewPlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

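// Tear down all sources, events and per-clip state. Note that the audio
// player itself is owned and deleted by the preview controller, so it is
// only detached here.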
void PreviewPlayer::reset_l() {

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned.
    // If we did this later, audio would continue playing while we
    // shut down the video-related resources and the player would appear
    // less responsive to the reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    // A single audio player instance is used across clips,
    // so do not delete it here.
    // It is deleted from the PreviewController class.
    //delete mAudioPlayer;
    mAudioPlayer = NULL;

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = NO_SEEK;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mIsVideoSourceJpg = false;
    mFrameRGBBuffer = NULL;
    if (mFrameYUVBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
        mFrameYUVBuffer = NULL;
    }
}

status_t PreviewPlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

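// (Re)start the video editor audio player. The underlying MediaSource has
// already been started, so the player is either started with
// sourceAlreadyStarted == true or simply resumed.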
status_t PreviewPlayer::startAudioPlayer_l() {
    CHECK(!(mFlags & AUDIO_RUNNING));

    if (mAudioSource == NULL || mAudioPlayer == NULL) {
        return OK;
    }

    if (!(mFlags & AUDIOPLAYER_STARTED)) {
        mFlags |= AUDIOPLAYER_STARTED;

        // We've already started the MediaSource in order to enable
        // the prefetcher to read its data.
        status_t err = mVeAudioPlayer->start(
                true /* sourceAlreadyStarted */);

        if (err != OK) {
            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
            return err;
        }
    } else {
        mVeAudioPlayer->resume();
    }

    mFlags |= AUDIO_RUNNING;

    mWatchForAudioEOS = true;

    return OK;
}

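// Attach the (shared) VideoEditorAudioPlayer. If both the currently attached
// source and the new one are DummyAudioSource instances, the existing source
// is kept and only its duration is extended, so no source change is required
// when moving between silent clips.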
status_t PreviewPlayer::setAudioPlayer(AudioPlayer *audioPlayer) {
    Mutex::Autolock autoLock(mLock);
    CHECK(!(mFlags & PLAYING));
    mAudioPlayer = audioPlayer;

    LOGV("SetAudioPlayer");
    mIsChangeSourceRequired = true;
    mVeAudioPlayer =
            (VideoEditorAudioPlayer*)mAudioPlayer;

    // Check whether both the new and the old source are dummy sources.
    sp<MediaSource> anAudioSource = mVeAudioPlayer->getSource();
    if (anAudioSource == NULL) {
        // Audio player does not have any source set.
        LOGV("setAudioPlayer: Audio player does not have any source set");
        return OK;
    }

    const char *pSrcType1;
    const char *pSrcType2;
    sp<MetaData> meta = anAudioSource->getFormat();

    if (meta->findCString(kKeyDecoderComponent, &pSrcType1)) {
        if (strcmp(pSrcType1, "DummyAudioSource") == 0) {
            meta = mAudioSource->getFormat();
            if (meta->findCString(kKeyDecoderComponent, &pSrcType2)) {
                if (strcmp(pSrcType2, "DummyAudioSource") == 0) {
                    mIsChangeSourceRequired = false;
                    // Just set the new play duration for the existing source
                    MediaSource *pMediaSrc = anAudioSource.get();
                    DummyAudioSource *pDummyAudioSource = (DummyAudioSource*)pMediaSrc;
                    // Extend the duration of the existing audio source
                    pDummyAudioSource->setDuration((int64_t)((mPlayEndTimeMsec)*1000));

                    // Stop the new audio source
                    // since we continue using the old source
                    LOGV("setAudioPlayer: stop new audio source");
                    mAudioSource->stop();
                }
            }
        }
    }

    return OK;
}

void PreviewPlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.

    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
        return;
    }

    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & (LOOPING | AUTO_LOOPING)) {
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        // pause before sending the event
        pause_l(true /* at eos */);
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        mFlags |= AT_EOS;
    }
}


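// Start (or resume) playback. Sets up the VideoEditorAudioPlayer with the
// audio-mix settings for the current clip, chooses the time source, kicks
// off the video event pump and honors any deferred seek.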
status_t PreviewPlayer::play_l() {

    mFlags &= ~SEEK_PREVIEW;

    if (mFlags & PLAYING) {
        return OK;
    }
    mStartNextPlayer = false;

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {

                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
                mVeAudioPlayer =
                          (VideoEditorAudioPlayer*)mAudioPlayer;

                mAudioPlayer->setSource(mAudioSource);

                mVeAudioPlayer->setAudioMixSettings(
                 mPreviewPlayerAudioMixSettings);

                mVeAudioPlayer->setAudioMixPCMFileHandle(
                 mAudioMixPCMFileHandle);

                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                 mCurrentMediaVolumeValue);

                mFlags |= AUDIOPLAYER_STARTED;
                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mVeAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    //delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);
                    return err;
                }

                mTimeSource = mVeAudioPlayer;
                mFlags |= AUDIO_RUNNING;
                deferredAudioSeek = true;
                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mVeAudioPlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
            bool isAudioPlayerStarted = mVeAudioPlayer->isStarted();

            if (mIsChangeSourceRequired == true) {
                LOGV("play_l: Change audio source required");

                if (isAudioPlayerStarted == true) {
                    mVeAudioPlayer->pause();
                }

                mVeAudioPlayer->setSource(mAudioSource);
                mVeAudioPlayer->setObserver(this);

                mVeAudioPlayer->setAudioMixSettings(
                 mPreviewPlayerAudioMixSettings);

                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                    mCurrentMediaVolumeValue);

                if (isAudioPlayerStarted == true) {
                    mVeAudioPlayer->resume();
                } else {
                    status_t err = OK;
                    err = mVeAudioPlayer->start(true);
                    if (err != OK) {
                        mAudioPlayer = NULL;
                        mVeAudioPlayer = NULL;

                        mFlags &= ~(PLAYING | FIRST_FRAME);
                        return err;
                    }
                }
            } else {
                LOGV("play_l: No Source change required");
                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                    mCurrentMediaVolumeValue);

                mVeAudioPlayer->resume();
            }

            mFlags |= AUDIOPLAYER_STARTED;
            mFlags |= AUDIO_RUNNING;
            mTimeSource = mVeAudioPlayer;
            deferredAudioSeek = true;
            mWatchForAudioSeekComplete = false;
            mWatchForAudioEOS = true;
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    // Set the seek option for image source files and read.
    // This resets the timestamping for image play.
    if (mIsVideoSourceJpg) {
        MediaSource::ReadOptions options;
        MediaBuffer *aLocalBuffer;
        options.setSeekTo(mSeekTimeUs);
        mVideoSource->read(&aLocalBuffer, &options);
        aLocalBuffer->release();
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}


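// Create the local preview renderer sized to the output video dimensions.
// A local renderer is always used because decoded buffers are modified by
// the post-processing (effects) module before being displayed.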
status_t PreviewPlayer::initRenderer_l() {
    if (mSurface != NULL || mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        // Always use the local renderer since decoded buffers are modified
        // by the postprocessing module.
        // Other decoders are instantiated locally and as a consequence
        // allocate their buffers in local address space.
        if (mVideoRenderer == NULL) {

            mVideoRenderer = PreviewLocalRenderer::initPreviewLocalRenderer(
                false,  // previewOnly
                (OMX_COLOR_FORMATTYPE)format,
                mSurface,
                mOutputVideoWidth, mOutputVideoHeight,
                mOutputVideoWidth, mOutputVideoHeight);

            if (mVideoRenderer == NULL) {
                return UNKNOWN_ERROR;
            }
            return OK;
        }
    }
    return OK;
}


void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);
    mISurface = isurface;
}


status_t PreviewPlayer::seekTo(int64_t timeUs) {

    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}


status_t PreviewPlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}


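// Instantiate the audio decoder. Raw PCM tracks are used directly; anything
// else is decoded via OMXCodec and wrapped in a VideoEditorSRC (sample rate
// converter) before use.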
status_t PreviewPlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        sp<MediaSource> aRawSource;
        aRawSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);

        if (aRawSource != NULL) {
            LOGV("initAudioDecoder: new VideoEditorSRC");
            mAudioSource = new VideoEditorSRC(aRawSource);
        }
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}


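// Instantiate the video decoder through OMXCodec and record the reported
// frame dimensions, which are later used to set up the YUV input planes for
// rendering and effects.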
status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        mReportedWidth = mVideoWidth;
        mReportedHeight = mVideoHeight;

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}


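// Main video pump, run on the event queue. It reads the next decoded frame,
// performs A/V sync against the audio (or system) clock, applies any video
// effects and media rendering for the current timestamp, renders the frame,
// and reposts itself until the clip's end cut time is reached.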
void PreviewPlayer::onVideoEvent() {
    uint32_t i=0;
    bool bAppliedVideoEffect = false;
    M4OSA_ERR err1 = M4NO_ERROR;
    int64_t imageFrameTimeUs = 0;

    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mFlags & SEEK_PREVIEW) {
        mFlags &= ~SEEK_PREVIEW;
        return;
    }

    TimeSource *ts_st =  &mSystemTimeSource;
    int64_t timeStartUs = ts_st->getRealTimeUs();

    if (mSeeking != NO_SEEK) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }


        if(mAudioSource != NULL) {

            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
                mAudioPlayer->pause();
                mFlags &= ~AUDIO_RUNNING;
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking != NO_SEEK) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                                                      mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        err = initRenderer_l();
                        if (err != OK) {
                            postStreamDoneEvent_l(err);
                        }

                    }
                    continue;
                }
                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied to the audio track.
                if (mSeeking != NO_SEEK) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                mFlags |= AUDIO_AT_EOS;
                mOverlayUpdateEventPosted = false;
                postStreamDoneEvent_l(err);
                // Set the last decoded timestamp to the clip duration
                mDecodedVideoTs = (mPlayEndTimeMsec*1000);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if (mSeeking != NO_SEEK) {
                if (videoTimeUs < mSeekTimeUs) {
                    // Buffers are before the seek time; ignore them.
                    mVideoBuffer->release();
                    mVideoBuffer = NULL;
                    continue;
                }
            } else {
                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                    // Frames are before the begin cut time; do not render.
                    mVideoBuffer->release();
                    mVideoBuffer = NULL;
                    continue;
                }
            }
            break;
        }
    }

    mNumberDecVideoFrames++;

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }


    if(!mStartNextPlayer) {
        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
        if(playbackTimeRemaining <= 1500000) {
            // When less than 1.5 sec of playback is left,
            // send a notification to start the next player.

            mStartNextPlayer = true;
            notifyListener_l(0xAAAAAAAA);
        }
    }

    SeekType wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);
    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
        status_t err = startAudioPlayer_l();
        if (err != OK) {
            LOGE("Starting the audio player failed w/ err %d", err);
            return;
        }
    }

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if(ts == NULL) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        return;
    }

    if(!mIsVideoSourceJpg) {
        if (mFlags & FIRST_FRAME) {
            mFlags &= ~FIRST_FRAME;

            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
        }

        int64_t realTimeUs, mediaTimeUs;
        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
        }

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        if (wasSeeking != NO_SEEK) {
            // Let's display the first frame after seeking right away.
            latenessUs = 0;
        }
        LOGV("Audio time stamp = %lld and video time stamp = %lld",
                                            ts->getRealTimeUs(),timeUs);
        if (latenessUs > 40000) {
            // We're more than 40ms late.

            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                                           latenessUs, latenessUs / 1E6);

            mVideoBuffer->release();
            mVideoBuffer = NULL;
            postVideoEvent_l(0);
            return;
        }

        if (latenessUs < -25000) {
            // We're more than 25ms early.
            LOGV("We're more than 25ms early, lateness %lld", latenessUs);

            postVideoEvent_l(25000);
            return;
        }
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        status_t err = initRenderer_l();
        if (err != OK) {
            postStreamDoneEvent_l(err);
        }
    }

    // If the timestamp exceeds the clip's endCutTime, do not render.
    if((timeUs/1000) > mPlayEndTimeMsec) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }
        mLastVideoBuffer = mVideoBuffer;
        mVideoBuffer = NULL;
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
        mOverlayUpdateEventPosted = false;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
        return;
    }
    // Capture the frame timestamp to be rendered
    mDecodedVideoTs = timeUs;

    // Post processing to apply video effects
    for(i=0;i<mNumberEffects;i++) {
        // First check if the effect start time matches the clip being previewed
        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
        (mEffectsSettings[i].uiStartTime >=
         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
        {
            // This effect doesn't belong to this clip, check the next one
            continue;
        }
        // Check if the effect applies to this particular frame timestamp
        if((mEffectsSettings[i].uiStartTime <=
         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
              && (mEffectsSettings[i].uiDuration != 0)) {
            setVideoPostProcessingNode(
             mEffectsSettings[i].VideoEffectType, TRUE);
        }
        else {
            setVideoPostProcessingNode(
             mEffectsSettings[i].VideoEffectType, FALSE);
        }
    }

    // Provide the overlay update indication when there is an overlay effect
    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; // never apply framing here.
        if (!mOverlayUpdateEventPosted) {
            // Find the effect in the effectSettings array
            M4OSA_UInt32 index;
            for (index = 0; index < mNumberEffects; index++) {
                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
                if(mEffectsSettings[index].VideoEffectType ==
                    (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
                    if (((mEffectsSettings[index].uiStartTime + 1) <=
                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
                        ((mEffectsSettings[index].uiStartTime - 1 +
                        mEffectsSettings[index].uiDuration) >=
                        timeMs + timeOffset - mPlayBeginTimeMsec))
                    {
                        break;
                    }
                }
            }
            if (index < mNumberEffects) {
                mCurrFramingEffectIndex = index;
                mOverlayUpdateEventPosted = true;
                postOverlayUpdateEvent_l();
                LOGV("Framing index = %d", mCurrFramingEffectIndex);
            } else {
                LOGV("No framing effects found");
            }
        }

    } else if (mOverlayUpdateEventPosted) {
        // Post the event when the overlay is no longer valid
        LOGV("Overlay is Done");
        mOverlayUpdateEventPosted = false;
        postOverlayUpdateEvent_l();
    }


    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
        err1 = doVideoPostProcessing();
        if(err1 != M4NO_ERROR) {
            LOGE("doVideoPostProcessing returned err");
            bAppliedVideoEffect = false;
        }
        else {
            bAppliedVideoEffect = true;
        }
    }
    else {
        bAppliedVideoEffect = false;
        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
            // No effects to be applied, but media rendering to be done
            err1 = doMediaRendering();
            if(err1 != M4NO_ERROR) {
                LOGE("doMediaRendering returned err");
                // Use the original mVideoBuffer for rendering
                mVideoResizedOrCropped = false;
            }
        }
    }

    if (mVideoRenderer != NULL) {
        LOGV("mVideoRenderer CALL render()");
        mVideoRenderer->render();
    }

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    // Post the progress callback based on the callback interval set
    if(mNumberDecVideoFrames >= mProgressCbInterval) {
        postProgressCallbackEvent_l();
        mNumberDecVideoFrames = 0;  // reset counter
    }

    // If the clip's EndCutTime is reached, post an EOS event
    if((timeUs/1000) >= mPlayEndTimeMsec) {
        LOGV("PreviewPlayer: onVideoEvent EOS.");
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        mOverlayUpdateEventPosted = false;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
    }
    else {
        if ((wasSeeking != NO_SEEK) && (mFlags & SEEK_PREVIEW)) {
            mFlags &= ~SEEK_PREVIEW;
            return;
        }

        if(!mIsVideoSourceJpg) {
            postVideoEvent_l(0);
        }
        else {
            postVideoEvent_l(33000);
        }
    }
}

status_t PreviewPlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

status_t PreviewPlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

status_t PreviewPlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new PreviewPlayerEvent(
            this, &PreviewPlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

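// Resolve the URI into a data source. Clips ending in ".rgb" (and anything
// the MPEG-4 extractor cannot handle) are treated as still images and routed
// through setDataSource_l_jpg().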
status_t PreviewPlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;
    sp<MediaExtractor> extractor;

    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    // If the file type is .rgb, there is no need to check for an extractor.
    int uriLen = strlen(mUri);
    int startOffset = uriLen - 4;
    if(!strncasecmp(mUri+startOffset, ".rgb", 4)) {
        extractor = NULL;
    }
    else {
        extractor = MediaExtractor::Create(dataSource,
                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
    }

    if (extractor == NULL) {
        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
        return setDataSource_l_jpg();
    }

    return setDataSource_l(extractor);
}


// static
bool PreviewPlayer::ContinuePreparation(void *cookie) {
    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

void PreviewPlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);
    LOGV("onPrepareAsyncEvent");

    if (mFlags & PREPARE_CANCELLED) {
        LOGV("LV PLAYER prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }
    finishAsyncPrepare_l();
}

void PreviewPlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        if (mVideoSource == NULL) {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
        } else {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
            notifyVideoSize_l();
        }
        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

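// Suspend/resume support: suspend() snapshots the URI, flags, position and,
// when accessible, a copy of the last decoded frame so that resume() can
// restore the data source and immediately repaint the Surface.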
status_t PreviewPlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            // We get here if the player is suspended again after being
            // resumed without any playback happening in between.
            SuspensionState *state = mSuspensionState;
            mSuspensionState = NULL;
            reset_l();
            mSuspensionState = state;
            return OK;
        }

        delete mSuspensionState;
        mSuspensionState = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    SuspensionState *state = new SuspensionState;
    state->mUri = mUri;
    state->mUriHeaders = mUriHeaders;
    state->mFileSource = mFileSource;

    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
    getPosition(&state->mPositionUs);

    if (mLastVideoBuffer) {
        size_t size = mLastVideoBuffer->range_length();
        if (size) {
            int32_t unreadable;
            if (!mLastVideoBuffer->meta_data()->findInt32(
                        kKeyIsUnreadable, &unreadable)
                    || unreadable == 0) {
                state->mLastVideoFrameSize = size;
                state->mLastVideoFrame = malloc(size);
                memcpy(state->mLastVideoFrame,
                   (const uint8_t *)mLastVideoBuffer->data()
                        + mLastVideoBuffer->range_offset(),
                   size);

                state->mVideoWidth = mVideoWidth;
                state->mVideoHeight = mVideoHeight;

                sp<MetaData> meta = mVideoSource->getFormat();
                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
            } else {
                LOGV("Unable to save last video frame, we have no access to "
                     "the decoded video data.");
            }
        }
    }

    reset_l();

    mSuspensionState = state;

    return OK;
}

status_t PreviewPlayer::resume() {
    LOGV("resume");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState == NULL) {
        return INVALID_OPERATION;
    }

    SuspensionState *state = mSuspensionState;
    mSuspensionState = NULL;

    status_t err;
    if (state->mFileSource != NULL) {
        err = AwesomePlayer::setDataSource_l(state->mFileSource);

        if (err == OK) {
            mFileSource = state->mFileSource;
        }
    } else {
        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
    }

    if (err != OK) {
        delete state;
        state = NULL;

        return err;
    }

    seekTo_l(state->mPositionUs);

    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);

    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
        mVideoRenderer =
            PreviewLocalRenderer::initPreviewLocalRenderer(
                    true,  // previewOnly
                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
                    mSurface,
                    state->mVideoWidth,
                    state->mVideoHeight,
                    state->mDecodedWidth,
                    state->mDecodedHeight);

        mVideoRendererIsPreview = true;

        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
                state->mLastVideoFrame, state->mLastVideoFrameSize);
    }

    if (state->mFlags & PLAYING) {
        play_l();
    }

    mSuspensionState = state;
    state = NULL;

    return OK;
}


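// Storyboard and audio-mix configuration. These setters are called (typically
// by the preview controller) before playback of each clip to pass down the
// effect list, audio mixing parameters and clip begin/end cut times.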
status_t PreviewPlayer::loadEffectsSettings(
                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
    M4OSA_UInt32 i = 0, rgbSize = 0;
    M4VIFI_UInt8 *tmp = M4OSA_NULL;

    mNumberEffects = nEffects;
    mEffectsSettings = pEffectSettings;
    return OK;
}

status_t PreviewPlayer::loadAudioMixSettings(
                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {

    LOGV("PreviewPlayer: loadAudioMixSettings: ");
    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
    return OK;
}

status_t PreviewPlayer::setAudioMixPCMFileHandle(
                    M4OSA_Context pAudioMixPCMFileHandle) {

    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
    return OK;
}

status_t PreviewPlayer::setAudioMixStoryBoardParam(
                    M4OSA_UInt32 audioMixStoryBoardTS,
                    M4OSA_UInt32 currentMediaBeginCutTime,
                    M4OSA_UInt32 primaryTrackVolValue) {

    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
    mCurrentMediaVolumeValue = primaryTrackVolValue;
    return OK;
}

status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {

    mPlayBeginTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {

    mPlayEndTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {

    mStoryboardStartTimeMsec = msec;
    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

    mProgressCbInterval = cbInterval;
    return OK;
}


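// Select the media rendering mode and map the requested output frame size
// enum to the output width/height used when creating the preview renderer.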
status_t PreviewPlayer::setMediaRenderingMode(
        M4xVSS_MediaRendering mode,
        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

    mRenderingMode = mode;

    /* reset boolean for each clip */
    mVideoResizedOrCropped = false;

    switch(outputVideoSize) {
        case M4VIDEOEDITING_kSQCIF:
            mOutputVideoWidth = 128;
            mOutputVideoHeight = 96;
            break;

        case M4VIDEOEDITING_kQQVGA:
            mOutputVideoWidth = 160;
            mOutputVideoHeight = 120;
            break;

        case M4VIDEOEDITING_kQCIF:
            mOutputVideoWidth = 176;
            mOutputVideoHeight = 144;
            break;

        case M4VIDEOEDITING_kQVGA:
            mOutputVideoWidth = 320;
            mOutputVideoHeight = 240;
            break;

        case M4VIDEOEDITING_kCIF:
            mOutputVideoWidth = 352;
            mOutputVideoHeight = 288;
            break;

        case M4VIDEOEDITING_kVGA:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kWVGA:
            mOutputVideoWidth = 800;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kNTSC:
            mOutputVideoWidth = 720;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_k640_360:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 360;
            break;

        case M4VIDEOEDITING_k854_480:
            mOutputVideoWidth = 854;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kHD1280:
            mOutputVideoWidth = 1280;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD1080:
            mOutputVideoWidth = 1080;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD960:
            mOutputVideoWidth = 960;
            mOutputVideoHeight = 720;
            break;

        default:
            LOGE("unsupported output video size set");
            return BAD_VALUE;
    }

    return OK;
}

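// Convert the decoded YUV420 frame into the YV12 output buffer obtained from
// the renderer, applying the configured rendering mode (via
// applyRenderingMode) so the frame fits the output dimensions.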
1680M4OSA_ERR PreviewPlayer::doMediaRendering() {
1681    M4OSA_ERR err = M4NO_ERROR;
1682    M4VIFI_ImagePlane planeIn[3], planeOut[3];
1683    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
1684    M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
1685    size_t videoBufferSize = 0;
1686    M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0;
1687    int32_t colorFormat = 0;
1688
1689    if(!mIsVideoSourceJpg) {
1690        sp<MetaData> meta = mVideoSource->getFormat();
1691        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1692    }
1693    else {
1694        colorFormat = OMX_COLOR_FormatYUV420Planar;
1695    }
1696
1697    videoBufferSize = mVideoBuffer->size();
1698    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
1699
1700    uint8_t* outBuffer;
1701    size_t outBufferStride = 0;
1702
1703    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
1704
1705    bufferOffset = index*frameSize;
1706    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
1707                mVideoBuffer->range_offset()+bufferOffset;
1708
1709
1710    /* In plane*/
1711    prepareYUV420ImagePlane(planeIn, mVideoWidth,
1712      mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
1713
1714    // Set the output YUV420 plane to be compatible with YV12 format
1715    // W & H even
1716    // YVU instead of YUV
1717    // align buffers on 32 bits
1718
1719    //In YV12 format, sizes must be even
1720    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
1721    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
1722
1723    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
1724     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
1725
1726
1727    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
1728
1729    if(err != M4NO_ERROR)
1730    {
1731        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", (int)err);
1732        return err;
1733    }
1734    mVideoResizedOrCropped = true;
1735
1736    return err;
1737}
1738
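// Resets the storyboard time base used for progress callbacks to the
// storyboard start time (stored internally in microseconds).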
1739status_t PreviewPlayer::resetJniCallbackTimeStamp() {
1740
1741    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1742    return OK;
1743}
1744
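// Posts a progress callback event on the event queue, unless one is already pending.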
1745void PreviewPlayer::postProgressCallbackEvent_l() {
1746    if (mProgressCbEventPending) {
1747        return;
1748    }
1749    mProgressCbEventPending = true;
1750
1751    mQueue.postEvent(mProgressCbEvent);
1752}
1753
1754
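// Handles a queued progress callback: reports the current playback position,
// expressed in storyboard time, to the listener as a MEDIA_INFO notification.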
1755void PreviewPlayer::onProgressCbEvent() {
1756    Mutex::Autolock autoLock(mLock);
1757    if (!mProgressCbEventPending) {
1758        return;
1759    }
1760    mProgressCbEventPending = false;
1761    // If playback starts from a previous I-frame (still before the begin cut
1762    // time), report the storyboard start; else the storyboard-relative position.
1763    if ((mDecodedVideoTs / 1000) < mPlayBeginTimeMsec) {
1764        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard / 1000);
1765    } else {
1767        notifyListener_l(MEDIA_INFO, 0,
1768                ((mDecodedVideoTs + mDecVideoTsStoryBoard) / 1000) - mPlayBeginTimeMsec);
1769    }
1770}
1771
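// Posts an overlay update event on the event queue, unless one is already pending.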
1772void PreviewPlayer::postOverlayUpdateEvent_l() {
1773    if (mOverlayUpdateEventPending) {
1774        return;
1775    }
1776    mOverlayUpdateEventPending = true;
1777    mQueue.postEvent(mOverlayUpdateEvent);
1778}
1779
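// Handles a queued overlay update: tells the listener whether a framing
// overlay was posted or cleared, along with the index of the current framing effect.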
1780void PreviewPlayer::onUpdateOverlayEvent() {
1781    Mutex::Autolock autoLock(mLock);
1782
1783    if (!mOverlayUpdateEventPending) {
1784        return;
1785    }
1786    mOverlayUpdateEventPending = false;
1787
1788    // Notify the listener whether an overlay update was posted (1) or not (0);
1789    // 0xBBBBBBBB is the private message code used for overlay update notifications.
1790    int updateState = mOverlayUpdateEventPosted ? 1 : 0;
1794    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
1795}
1796
1797
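// Enables or disables a video effect for post-processing: maps the
// M4VSS3GPP/M4xVSS effect type to the internal VIDEO_EFFECT_* flag and
// sets or clears it in mCurrentVideoEffect.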
1798void PreviewPlayer::setVideoPostProcessingNode(
1799                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
1800
1801    uint32_t effect = VIDEO_EFFECT_NONE;
1802
1803    //Map M4VSS3GPP_VideoEffectType to local enum
1804    switch(type) {
1805        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1806            effect = VIDEO_EFFECT_FADEFROMBLACK;
1807            break;
1808
1809        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1810            effect = VIDEO_EFFECT_FADETOBLACK;
1811            break;
1812
1813        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1814            effect = VIDEO_EFFECT_CURTAINOPEN;
1815            break;
1816
1817        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1818            effect = VIDEO_EFFECT_CURTAINCLOSE;
1819            break;
1820
1821        case M4xVSS_kVideoEffectType_BlackAndWhite:
1822            effect = VIDEO_EFFECT_BLACKANDWHITE;
1823            break;
1824
1825        case M4xVSS_kVideoEffectType_Pink:
1826            effect = VIDEO_EFFECT_PINK;
1827            break;
1828
1829        case M4xVSS_kVideoEffectType_Green:
1830            effect = VIDEO_EFFECT_GREEN;
1831            break;
1832
1833        case M4xVSS_kVideoEffectType_Sepia:
1834            effect = VIDEO_EFFECT_SEPIA;
1835            break;
1836
1837        case M4xVSS_kVideoEffectType_Negative:
1838            effect = VIDEO_EFFECT_NEGATIVE;
1839            break;
1840
1841        case M4xVSS_kVideoEffectType_Framing:
1842            effect = VIDEO_EFFECT_FRAMING;
1843            break;
1844
1845        case M4xVSS_kVideoEffectType_Fifties:
1846            effect = VIDEO_EFFECT_FIFTIES;
1847            break;
1848
1849        case M4xVSS_kVideoEffectType_ColorRGB16:
1850            effect = VIDEO_EFFECT_COLOR_RGB16;
1851            break;
1852
1853        case M4xVSS_kVideoEffectType_Gradient:
1854            effect = VIDEO_EFFECT_GRADIENT;
1855            break;
1856
1857        default:
1858            effect = VIDEO_EFFECT_NONE;
1859            break;
1860    }
1861
1862    if(enable == M4OSA_TRUE) {
1863        //If already set, then no need to set again
1864        if(!(mCurrentVideoEffect & effect)) {
1865            mCurrentVideoEffect |= effect;
1866            if(effect == VIDEO_EFFECT_FIFTIES) {
1867                mIsFiftiesEffectStarted = true;
1868            }
1869        }
1870    }
1871    else  {
1872        //Reset only if already set
1873        if(mCurrentVideoEffect & effect) {
1874            mCurrentVideoEffect &= ~effect;
1875        }
1876    }
1877}
1878
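// Records the source dimensions of a still-image clip.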
1879status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
1880    mVideoWidth = width;
1881    mVideoHeight = height;
1882    return OK;
1883}
1884
1885
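// Applies the currently enabled video effects and the rendering mode to the
// decoded frame, writing the result directly into the renderer's buffer.
// Semi-planar YUV420 input is rejected; only planar YUV420 is handled here.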
1886M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
1887    M4OSA_ERR err = M4NO_ERROR;
1888    vePostProcessParams postProcessParams;
1889    int32_t colorFormat = 0;
1890
1891
1892    if(!mIsVideoSourceJpg) {
1893        sp<MetaData> meta = mVideoSource->getFormat();
1894        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1895    }
1896    else {
1897        colorFormat = OMX_COLOR_FormatYUV420Planar;
1898    }
1899
1900    if ((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
1901        (colorFormat == 0x7FA30C00)) {  // vendor-specific YUV420SP variant
1902        LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
1903        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
1904    }
1905
1906    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
1907        + mVideoBuffer->range_offset();
1908
1909    postProcessParams.videoWidth = mVideoWidth;
1910    postProcessParams.videoHeight = mVideoHeight;
1911    postProcessParams.timeMs = mDecodedVideoTs/1000;
1912    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
1913    postProcessParams.effectsSettings = mEffectsSettings;
1914    postProcessParams.numberEffects = mNumberEffects;
1915    postProcessParams.outVideoWidth = mOutputVideoWidth;
1916    postProcessParams.outVideoHeight = mOutputVideoHeight;
1917    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
1918    postProcessParams.renderingMode = mRenderingMode;
1919    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
1920        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
1921        mIsFiftiesEffectStarted = M4OSA_FALSE;
1922    }
1923    else {
1924       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
1925    }
1926
1927    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
1928    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
1929    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer),
            &(postProcessParams.outBufferStride));
1930    err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
1931
1932    return err;
1933}
1934
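// Decodes and keeps the first usable video frame: honors a pending seek or
// the begin cut time, skips empty buffers, and handles decoder format
// changes and end of stream.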
1935status_t PreviewPlayer::readFirstVideoFrame() {
1936    LOGV("PreviewPlayer::readFirstVideoFrame");
1937
1938    if (!mVideoBuffer) {
1939        MediaSource::ReadOptions options;
1940        if (mSeeking != NO_SEEK) {
1941            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
1942                    mSeekTimeUs / 1E6);
1943
1944            options.setSeekTo(
1945                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
1946        }
1947        for (;;) {
1948            status_t err = mVideoSource->read(&mVideoBuffer, &options);
1949            options.clearSeekTo();
1950
1951            if (err != OK) {
1952                CHECK_EQ(mVideoBuffer, NULL);
1953
1954                if (err == INFO_FORMAT_CHANGED) {
1955                    LOGV("LV PLAYER VideoSource signalled format change");
1956                    notifyVideoSize_l();
1957                    sp<MetaData> meta = mVideoSource->getFormat();
1958
1959                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
1960                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
1961
1962                    if (mVideoRenderer != NULL) {
1963                        mVideoRendererIsPreview = false;
1964                        err = initRenderer_l();
1965                        if (err != OK) {
1966                            postStreamDoneEvent_l(err);
1967                        }
1968                    }
1969                    continue;
1970                }
1971                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
1972                mFlags |= VIDEO_AT_EOS;
1973                mFlags |= AUDIO_AT_EOS;
1974                postStreamDoneEvent_l(err);
1975                return OK;
1976            }
1977
1978            if (mVideoBuffer->range_length() == 0) {
1979                // Some decoders, notably the PV AVC software decoder
1980                // return spurious empty buffers that we just want to ignore.
1981
1982                mVideoBuffer->release();
1983                mVideoBuffer = NULL;
1984                continue;
1985            }
1986
1987            int64_t videoTimeUs;
1988            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
1989            if (mSeeking != NO_SEEK) {
1990                if (videoTimeUs < mSeekTimeUs) {
1991                    // buffers are before seek time
1992                    // ignore them
1993                    mVideoBuffer->release();
1994                    mVideoBuffer = NULL;
1995                    continue;
1996                }
1997            } else {
1998                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
1999                    // buffers are before begin cut time
2000                    // ignore them
2001                    mVideoBuffer->release();
2002                    mVideoBuffer = NULL;
2003                    continue;
2004                }
2005            }
2006            break;
2007        }
2008    }
2009
2010    int64_t timeUs;
2011    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
2012
2013    {
2014        Mutex::Autolock autoLock(mMiscStateLock);
2015        mVideoTimeUs = timeUs;
2016    }
2017
2018    mDecodedVideoTs = timeUs;
2019
2020    return OK;
2022}
2023
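// Returns the timestamp of the last decoded frame, converted to storyboard
// time (in milliseconds) relative to the begin cut time.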
2024status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
2025    *lastRenderedTimeMs = (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec);
2026    return OK;
2027}
2028
2029}  // namespace android
2030