PreviewPlayer.cpp revision b2d6e0f74a12e5cce5b429e646172c63346346c3
1/*
2 * Copyright (C) 2011 NXP Software
3 * Copyright (C) 2011 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18
19#define LOG_NDEBUG 1
20#define LOG_TAG "PreviewPlayer"
21#include <utils/Log.h>
22
23#include <dlfcn.h>
24
25#include "include/ARTSPController.h"
26#include "PreviewPlayer.h"
27#include "DummyAudioSource.h"
28#include "DummyVideoSource.h"
29#include "VideoEditorSRC.h"
30#include "include/NuCachedSource2.h"
31#include "include/ThrottledSource.h"
32
33
34#include "PreviewRenderer.h"
35
36#include <binder/IPCThreadState.h>
37#include <media/stagefright/DataSource.h>
38#include <media/stagefright/FileSource.h>
39#include <media/stagefright/MediaBuffer.h>
40#include <media/stagefright/MediaDefs.h>
41#include <media/stagefright/MediaExtractor.h>
42#include <media/stagefright/MediaDebug.h>
43#include <media/stagefright/MediaSource.h>
44#include <media/stagefright/MetaData.h>
45#include <media/stagefright/OMXCodec.h>
46
47#include <surfaceflinger/Surface.h>
48#include <media/stagefright/foundation/ALooper.h>
49
50namespace android {
51
52
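// Small wrapper that lets a PreviewPlayer member function be posted to the
// TimedEventQueue and invoked when the event fires.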
53struct PreviewPlayerEvent : public TimedEventQueue::Event {
54    PreviewPlayerEvent(
55            PreviewPlayer *player,
56            void (PreviewPlayer::*method)())
57        : mPlayer(player),
58          mMethod(method) {
59    }
60
61protected:
62    virtual ~PreviewPlayerEvent() {}
63
64    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
65        (mPlayer->*mMethod)();
66    }
67
68private:
69    PreviewPlayer *mPlayer;
70    void (PreviewPlayer::*mMethod)();
71
72    PreviewPlayerEvent(const PreviewPlayerEvent &);
73    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
74};
75
76
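// Software renderer used for preview: it wraps a PreviewRenderer and exposes
// its YV12 output buffer so decoded frames can be post-processed directly
// into the display buffer before being rendered to the surface.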
77struct PreviewLocalRenderer : public PreviewPlayerRenderer {
78
79    static PreviewLocalRenderer* initPreviewLocalRenderer (
80            bool previewOnly,
81            OMX_COLOR_FORMATTYPE colorFormat,
82            const sp<Surface> &surface,
83            size_t displayWidth, size_t displayHeight,
84            size_t decodedWidth, size_t decodedHeight,
85            int32_t rotationDegrees = 0)
86    {
87        PreviewLocalRenderer* localRenderer = new
88            PreviewLocalRenderer(
89                previewOnly,
90                colorFormat,
91                surface,
92                displayWidth, displayHeight,
93                decodedWidth, decodedHeight,
94                rotationDegrees);
95
96        if (localRenderer->init(previewOnly,
97                 colorFormat, surface,
98                 displayWidth, displayHeight,
99                 decodedWidth, decodedHeight,
100                 rotationDegrees) != OK)
101        {
102            delete localRenderer;
103            return NULL;
104        }
105        return localRenderer;
106    }
107
108    virtual void render(MediaBuffer *buffer) {
109        render((const uint8_t *)buffer->data() + buffer->range_offset(),
110               buffer->range_length());
111    }
112
113    void render(const void *data, size_t size) {
114        mTarget->render(data, size, NULL);
115    }
116    void render() {
117        mTarget->renderYV12();
118    }
119    void getBuffer(uint8_t **data, size_t *stride) {
120        mTarget->getBufferYV12(data, stride);
121    }
122
123protected:
124    virtual ~PreviewLocalRenderer() {
125        delete mTarget;
126        mTarget = NULL;
127    }
128
129private:
130    PreviewRenderer *mTarget;
131
132    PreviewLocalRenderer(
133            bool previewOnly,
134            OMX_COLOR_FORMATTYPE colorFormat,
135            const sp<Surface> &surface,
136            size_t displayWidth, size_t displayHeight,
137            size_t decodedWidth, size_t decodedHeight,
138            int32_t rotationDegrees = 0)
139        : mTarget(NULL) {
140    }
141
142
143    int init(
144            bool previewOnly,
145            OMX_COLOR_FORMATTYPE colorFormat,
146            const sp<Surface> &surface,
147            size_t displayWidth, size_t displayHeight,
148            size_t decodedWidth, size_t decodedHeight,
149            int32_t rotationDegrees = 0);
150
151    PreviewLocalRenderer(const PreviewLocalRenderer &);
152    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
153};
154
155int PreviewLocalRenderer::init(
156        bool previewOnly,
157        OMX_COLOR_FORMATTYPE colorFormat,
158        const sp<Surface> &surface,
159        size_t displayWidth, size_t displayHeight,
160        size_t decodedWidth, size_t decodedHeight,
161        int32_t rotationDegrees) {
162
163    mTarget = PreviewRenderer::CreatePreviewRenderer (
164            colorFormat, surface, displayWidth, displayHeight,
165            decodedWidth, decodedHeight, rotationDegrees);
166    if (mTarget == M4OSA_NULL) {
167        return UNKNOWN_ERROR;
168    }
169    return OK;
170}
171
172PreviewPlayer::PreviewPlayer()
173    : AwesomePlayer(),
174      mFrameRGBBuffer(NULL),
175      mFrameYUVBuffer(NULL),
176      mReportedWidth(0),
177      mReportedHeight(0),
178      mCurrFramingEffectIndex(0) {
179
180    mVideoRenderer = NULL;
181    mLastVideoBuffer = NULL;
182    mSuspensionState = NULL;
183    mEffectsSettings = NULL;
184    mVeAudioPlayer = NULL;
185    mAudioMixStoryBoardTS = 0;
186    mCurrentMediaBeginCutTime = 0;
187    mCurrentMediaVolumeValue = 0;
188    mNumberEffects = 0;
189    mDecodedVideoTs = 0;
190    mDecVideoTsStoryBoard = 0;
191    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
192    mProgressCbInterval = 0;
193    mNumberDecVideoFrames = 0;
194    mOverlayUpdateEventPosted = false;
195
196    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
197    mVideoEventPending = false;
198    mStreamDoneEvent = new PreviewPlayerEvent(this,
199         &AwesomePlayer::onStreamDone);
200
201    mStreamDoneEventPending = false;
202
203    mCheckAudioStatusEvent = new PreviewPlayerEvent(
204        this, &AwesomePlayer::onCheckAudioStatus);
205
206    mAudioStatusEventPending = false;
207
208    mProgressCbEvent = new PreviewPlayerEvent(this,
209         &PreviewPlayer::onProgressCbEvent);
210
211    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
212        &PreviewPlayer::onUpdateOverlayEvent);
213    mProgressCbEventPending = false;
214
215    mOverlayUpdateEventPending = false;
216    mResizedVideoBuffer = NULL;
217    mVideoResizedOrCropped = false;
218    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
219    mIsFiftiesEffectStarted = false;
220    reset();
221}
222
223PreviewPlayer::~PreviewPlayer() {
224
225    if (mQueueStarted) {
226        mQueue.stop();
227    }
228
229    reset();
230
231    if(mResizedVideoBuffer != NULL) {
232        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
233        mResizedVideoBuffer = NULL;
234    }
235
236    mVideoRenderer.clear();
237    mVideoRenderer = NULL;
238}
239
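// Cancel all pending player events; the keepBufferingGoing parameter mirrors
// AwesomePlayer's signature but is unused in this implementation.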
240void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
241    mQueue.cancelEvent(mVideoEvent->eventID());
242    mVideoEventPending = false;
243    mQueue.cancelEvent(mStreamDoneEvent->eventID());
244    mStreamDoneEventPending = false;
245    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
246    mAudioStatusEventPending = false;
247
248    mQueue.cancelEvent(mProgressCbEvent->eventID());
249    mProgressCbEventPending = false;
250}
251
252status_t PreviewPlayer::setDataSource(
253        const char *uri, const KeyedVector<String8, String8> *headers) {
254    Mutex::Autolock autoLock(mLock);
255    return setDataSource_l(uri, headers);
256}
257
258status_t PreviewPlayer::setDataSource_l(
259        const char *uri, const KeyedVector<String8, String8> *headers) {
260    reset_l();
261
262    mUri = uri;
263
264    if (headers) {
265        mUriHeaders = *headers;
266    }
267
268    // The actual work will be done during preparation in the call to
269    // ::finishSetDataSource_l to avoid blocking the calling thread in
270    // setDataSource for any significant time.
271    return OK;
272}
273
274status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
275    bool haveAudio = false;
276    bool haveVideo = false;
277    for (size_t i = 0; i < extractor->countTracks(); ++i) {
278        sp<MetaData> meta = extractor->getTrackMetaData(i);
279
280        const char *mime;
281        CHECK(meta->findCString(kKeyMIMEType, &mime));
282
283        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
284            setVideoSource(extractor->getTrack(i));
285            haveVideo = true;
286        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
287            setAudioSource(extractor->getTrack(i));
288            haveAudio = true;
289
290            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
291                // Only do this for vorbis audio, none of the other audio
292                // formats even support this ringtone specific hack and
293                // retrieving the metadata on some extractors may turn out
294                // to be very expensive.
295                sp<MetaData> fileMeta = extractor->getMetaData();
296                int32_t loop;
297                if (fileMeta != NULL
298                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
299                         && loop != 0) {
300                    mFlags |= AUTO_LOOPING;
301                }
302            }
303        }
304
305        if (haveAudio && haveVideo) {
306            break;
307        }
308    }
309
310    /* Add support for a dummy (silent) audio track when the clip has none */
311    if (!haveAudio) {
312        LOGV("PreviewPlayer: setDataSource_l DummyAudioSource creation started");
313
314        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
315                                              ((mPlayEndTimeMsec)*1000));
316        LOGV("PreviewPlayer: setDataSource_l DummyAudioSource created");
317        if (mAudioTrack != NULL) {
318            haveAudio = true;
319        }
320    }
321
322    if (!haveAudio && !haveVideo) {
323        return UNKNOWN_ERROR;
324    }
325
326    mExtractorFlags = extractor->flags();
327    return OK;
328}
329
330status_t PreviewPlayer::setDataSource_l_jpg() {
331
332    LOGV("PreviewPlayer: setDataSource_l_jpg started");
333
334    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
335                                          ((mPlayEndTimeMsec)*1000));
336    LOGV("PreviewPlayer: setDataSource_l_jpg DummyAudioSource created");
337    if (mAudioSource != NULL) {
338        setAudioSource(mAudioSource);
339    }
340    status_t error = mAudioSource->start();
341    if (error != OK) {
342        LOGE("Error starting dummy audio source");
343        mAudioSource.clear();
344        return error;
345    }
346
347    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;
348
349    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
350                                            mDurationUs, mUri);
351    mReportedWidth = mVideoWidth;
352    mReportedHeight = mVideoHeight;
353
354    setVideoSource(mVideoSource);
355    status_t err1 = mVideoSource->start();
356    if (err1 != OK) {
357        mVideoSource.clear();
358        return err1;
359    }
360
361    mIsVideoSourceJpg = true;
362    return OK;
363}
364
365void PreviewPlayer::reset() {
366    Mutex::Autolock autoLock(mLock);
367    reset_l();
368}
369
370void PreviewPlayer::reset_l() {
371
372    if (mFlags & PREPARING) {
373        mFlags |= PREPARE_CANCELLED;
374    }
375
376    while (mFlags & PREPARING) {
377        mPreparedCondition.wait(mLock);
378    }
379
380    cancelPlayerEvents();
381    mAudioTrack.clear();
382    mVideoTrack.clear();
383
384    // Shut down audio first, so that the response to the reset request
385    // appears to happen instantaneously as far as the user is concerned.
386    // If we did this later, audio would continue playing while we
387    // shut down the video-related resources and the player would appear
388    // less responsive to a reset request.
389    if (mAudioPlayer == NULL && mAudioSource != NULL) {
390        // If we had an audio player, it would have effectively
391        // taken possession of the audio source and stopped it when
392        // _it_ is stopped. Otherwise this is still our responsibility.
393        mAudioSource->stop();
394    }
395    mAudioSource.clear();
396
397    mTimeSource = NULL;
398
399    delete mAudioPlayer;
400    mAudioPlayer = NULL;
401
402    if (mLastVideoBuffer) {
403        mLastVideoBuffer->release();
404        mLastVideoBuffer = NULL;
405    }
406
407    if (mVideoBuffer) {
408        mVideoBuffer->release();
409        mVideoBuffer = NULL;
410    }
411
412    if (mVideoSource != NULL) {
413        mVideoSource->stop();
414
415        // The following hack is necessary to ensure that the OMX
416        // component is completely released by the time we may try
417        // to instantiate it again.
418        wp<MediaSource> tmp = mVideoSource;
419        mVideoSource.clear();
420        while (tmp.promote() != NULL) {
421            usleep(1000);
422        }
423        IPCThreadState::self()->flushCommands();
424    }
425
426    mDurationUs = -1;
427    mFlags = 0;
428    mExtractorFlags = 0;
429    mVideoWidth = mVideoHeight = -1;
430    mTimeSourceDeltaUs = 0;
431    mVideoTimeUs = 0;
432
433    mSeeking = false;
434    mSeekNotificationSent = false;
435    mSeekTimeUs = 0;
436
437    mUri.setTo("");
438    mUriHeaders.clear();
439
440    mFileSource.clear();
441
442    delete mSuspensionState;
443    mSuspensionState = NULL;
444
445    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
446    mIsVideoSourceJpg = false;
447    mFrameRGBBuffer = NULL;
448    if(mFrameYUVBuffer != NULL) {
449        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
450        mFrameYUVBuffer = NULL;
451    }
452}
453
454status_t PreviewPlayer::play() {
455    Mutex::Autolock autoLock(mLock);
456
457    mFlags &= ~CACHE_UNDERRUN;
458
459    return play_l();
460}
461
462status_t PreviewPlayer::startAudioPlayer_l() {
463    CHECK(!(mFlags & AUDIO_RUNNING));
464
465    if (mAudioSource == NULL || mAudioPlayer == NULL) {
466        return OK;
467    }
468
469    if (!(mFlags & AUDIOPLAYER_STARTED)) {
470        mFlags |= AUDIOPLAYER_STARTED;
471
472        // We've already started the MediaSource in order to enable
473        // the prefetcher to read its data.
474        status_t err = mVeAudioPlayer->start(
475                true /* sourceAlreadyStarted */);
476
477        if (err != OK) {
478            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
479            return err;
480        }
481    } else {
482        mVeAudioPlayer->resume();
483    }
484
485    mFlags |= AUDIO_RUNNING;
486
487    mWatchForAudioEOS = true;
488
489    return OK;
490}
491
492status_t PreviewPlayer::play_l() {
493
494    mFlags &= ~SEEK_PREVIEW;
495
496    if (mFlags & PLAYING) {
497        return OK;
498    }
499    mStartNextPlayer = false;
500
501    if (!(mFlags & PREPARED)) {
502        status_t err = prepare_l();
503
504        if (err != OK) {
505            return err;
506        }
507    }
508
509    mFlags |= PLAYING;
510    mFlags |= FIRST_FRAME;
511
512    bool deferredAudioSeek = false;
513
514    if (mAudioSource != NULL) {
515        if (mAudioPlayer == NULL) {
516            if (mAudioSink != NULL) {
517
518                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
519                mVeAudioPlayer =
520                          (VideoEditorAudioPlayer*)mAudioPlayer;
521
522                mAudioPlayer->setSource(mAudioSource);
523
524                mVeAudioPlayer->setAudioMixSettings(
525                 mPreviewPlayerAudioMixSettings);
526
527                mVeAudioPlayer->setAudioMixPCMFileHandle(
528                 mAudioMixPCMFileHandle);
529
530                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
531                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
532                 mCurrentMediaVolumeValue);
533
534                mTimeSource = mVeAudioPlayer;  // same object as mAudioPlayer
535
536                deferredAudioSeek = true;
537                mWatchForAudioSeekComplete = false;
538                mWatchForAudioEOS = true;
539            }
540         }
541
542        CHECK(!(mFlags & AUDIO_RUNNING));
543
544        if (mVideoSource == NULL) {
545            status_t err = startAudioPlayer_l();
546
547            if (err != OK) {
548                delete mAudioPlayer;
549                mAudioPlayer = NULL;
550                mFlags &= ~(PLAYING | FIRST_FRAME);
551                return err;
552            }
553        }
554    }
555
556    if (mTimeSource == NULL && mAudioPlayer == NULL) {
557        mTimeSource = &mSystemTimeSource;
558    }
559
560    // For image source files, issue one read with the seek option set.
561    // This resets the timestamping for image playback.
562    if (mIsVideoSourceJpg) {
563        MediaSource::ReadOptions options;
564        MediaBuffer *aLocalBuffer;
565        options.setSeekTo(mSeekTimeUs);
566        mVideoSource->read(&aLocalBuffer, &options);
567        aLocalBuffer->release();
568    }
569
570    if (mVideoSource != NULL) {
571        // Kick off video playback
572        postVideoEvent_l();
573    }
574
575    if (deferredAudioSeek) {
576        // If there was a seek request while we were paused
577        // and we're just starting up again, honor the request now.
578        seekAudioIfNecessary_l();
579    }
580
581    if (mFlags & AT_EOS) {
582        // Legacy behaviour, if a stream finishes playing and then
583        // is started again, we play from the start...
584        seekTo_l(0);
585    }
586
587    return OK;
588}
589
590
591status_t PreviewPlayer::initRenderer_l() {
592    if (mSurface != NULL || mISurface != NULL) {
593        sp<MetaData> meta = mVideoSource->getFormat();
594
595        int32_t format;
596        const char *component;
597        int32_t decodedWidth, decodedHeight;
598        CHECK(meta->findInt32(kKeyColorFormat, &format));
599        CHECK(meta->findCString(kKeyDecoderComponent, &component));
600        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
601        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
602
603        // Must ensure that mVideoRenderer's destructor is actually executed
604        // before creating a new one.
605        IPCThreadState::self()->flushCommands();
606
607        // Always use the local software renderer, since the decoded
608        // buffers are modified by the post-processing (effects) module.
609        // The decoders are instantiated locally and as a consequence
610        // allocate their buffers in local address space.
611        if(mVideoRenderer == NULL) {
612
613            mVideoRenderer = PreviewLocalRenderer:: initPreviewLocalRenderer (
614                false,  // previewOnly
615                (OMX_COLOR_FORMATTYPE)format,
616                mSurface,
617                mOutputVideoWidth, mOutputVideoHeight,
618                mOutputVideoWidth, mOutputVideoHeight);
619
620            if ( mVideoRenderer == NULL )
621            {
622                return UNKNOWN_ERROR;
623            }
624            return OK;
625        }
626    }
627    return OK;
628}
629
630
631void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
632    Mutex::Autolock autoLock(mLock);
633    mISurface = isurface;
634}
635
636
637status_t PreviewPlayer::seekTo(int64_t timeUs) {
638
639    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
640        Mutex::Autolock autoLock(mLock);
641        return seekTo_l(timeUs);
642    }
643
644    return OK;
645}
646
647
648status_t PreviewPlayer::getVideoDimensions(
649        int32_t *width, int32_t *height) const {
650    Mutex::Autolock autoLock(mLock);
651
652    if (mVideoWidth < 0 || mVideoHeight < 0) {
653        return UNKNOWN_ERROR;
654    }
655
656    *width = mVideoWidth;
657    *height = mVideoHeight;
658
659    return OK;
660}
661
662
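// Set up the audio decoding chain: raw PCM tracks are used directly, while
// compressed tracks are decoded through OMXCodec and wrapped in a
// VideoEditorSRC to resample the output as needed.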
663status_t PreviewPlayer::initAudioDecoder() {
664    sp<MetaData> meta = mAudioTrack->getFormat();
665    const char *mime;
666    CHECK(meta->findCString(kKeyMIMEType, &mime));
667
668    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
669        mAudioSource = mAudioTrack;
670    } else {
671        sp<MediaSource> aRawSource;
672        aRawSource = OMXCodec::Create(
673                mClient.interface(), mAudioTrack->getFormat(),
674                false, // createEncoder
675                mAudioTrack);
676
677        if(aRawSource != NULL) {
678            LOGV("initAudioDecoder: new VideoEditorSRC");
679            mAudioSource = new VideoEditorSRC(aRawSource);
680        }
681    }
682
683    if (mAudioSource != NULL) {
684        int64_t durationUs;
685        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
686            Mutex::Autolock autoLock(mMiscStateLock);
687            if (mDurationUs < 0 || durationUs > mDurationUs) {
688                mDurationUs = durationUs;
689            }
690        }
691        status_t err = mAudioSource->start();
692
693        if (err != OK) {
694            mAudioSource.clear();
695            return err;
696        }
697    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
698        // For legacy reasons we're simply going to ignore the absence
699        // of an audio decoder for QCELP instead of aborting playback
700        // altogether.
701        return OK;
702    }
703
704    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
705}
706
707
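// Create the OMX video decoder for the current video track and record the
// clip dimensions reported by the track's format.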
708status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
709
710    mVideoSource = OMXCodec::Create(
711            mClient.interface(), mVideoTrack->getFormat(),
712            false,
713            mVideoTrack,
714            NULL, flags);
715
716    if (mVideoSource != NULL) {
717        int64_t durationUs;
718        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
719            Mutex::Autolock autoLock(mMiscStateLock);
720            if (mDurationUs < 0 || durationUs > mDurationUs) {
721                mDurationUs = durationUs;
722            }
723        }
724
725        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
726        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
727
728        mReportedWidth = mVideoWidth;
729        mReportedHeight = mVideoHeight;
730
731        status_t err = mVideoSource->start();
732
733        if (err != OK) {
734            mVideoSource.clear();
735            return err;
736        }
737    }
738
739    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
740}
741
742
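// Main video pump: decodes the next frame, performs A/V synchronization,
// applies any active storyboard effects and rendering-mode conversion, then
// renders the frame and schedules the next video event.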
743void PreviewPlayer::onVideoEvent() {
744    uint32_t i=0;
745    bool bAppliedVideoEffect = false;
746    M4OSA_ERR err1 = M4NO_ERROR;
747    int64_t imageFrameTimeUs = 0;
748
749    Mutex::Autolock autoLock(mLock);
750    if (!mVideoEventPending) {
751        // The event has been cancelled in reset_l() but had already
752        // been scheduled for execution at that time.
753        return;
754    }
755    mVideoEventPending = false;
756
757    if (mFlags & SEEK_PREVIEW) {
758        mFlags &= ~SEEK_PREVIEW;
759        return;
760    }
761
762    TimeSource *ts_st =  &mSystemTimeSource;
763    int64_t timeStartUs = ts_st->getRealTimeUs();
764
765    if (mSeeking) {
766        if (mLastVideoBuffer) {
767            mLastVideoBuffer->release();
768            mLastVideoBuffer = NULL;
769        }
770
771
772        if(mAudioSource != NULL) {
773
774            // We're going to seek the video source first, followed by
775            // the audio source.
776            // In order to avoid jumps in the DataSource offset caused by
777            // the audio codec prefetching data from the old locations
778            // while the video codec is already reading data from the new
779            // locations, we'll "pause" the audio source, causing it to
780            // stop reading input data until a subsequent seek.
781
782            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
783                mAudioPlayer->pause();
784                mFlags &= ~AUDIO_RUNNING;
785            }
786            mAudioSource->pause();
787        }
788    }
789
790    if (!mVideoBuffer) {
791        MediaSource::ReadOptions options;
792        if (mSeeking) {
793            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
794                                                      mSeekTimeUs / 1E6);
795
796            options.setSeekTo(
797                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
798        }
799        for (;;) {
800            status_t err = mVideoSource->read(&mVideoBuffer, &options);
801            options.clearSeekTo();
802
803            if (err != OK) {
804                CHECK_EQ(mVideoBuffer, NULL);
805
806                if (err == INFO_FORMAT_CHANGED) {
807                    LOGV("LV PLAYER VideoSource signalled format change");
808                    notifyVideoSize_l();
809                    sp<MetaData> meta = mVideoSource->getFormat();
810
811                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
812                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
813                    if (mVideoRenderer != NULL) {
814                        mVideoRendererIsPreview = false;
815                        err = initRenderer_l();
816                        if (err != OK) {
817                            postStreamDoneEvent_l(err);
818                        }
819
820                    }
821                    continue;
822                }
823                // So video playback is complete, but we may still have
824                // a seek request pending that needs to be applied to the audio track
825                if (mSeeking) {
826                    LOGV("video stream ended while seeking!");
827                }
828                finishSeekIfNecessary(-1);
829                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
830                mFlags |= VIDEO_AT_EOS;
831                mOverlayUpdateEventPosted = false;
832                postStreamDoneEvent_l(err);
833                // Set the last decoded timestamp to duration
834                mDecodedVideoTs = (mPlayEndTimeMsec*1000);
835                return;
836            }
837
838            if (mVideoBuffer->range_length() == 0) {
839                // Some decoders, notably the PV AVC software decoder
840                // return spurious empty buffers that we just want to ignore.
841
842                mVideoBuffer->release();
843                mVideoBuffer = NULL;
844                continue;
845            }
846
847            int64_t videoTimeUs;
848            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
849
850            if (mSeeking) {
851                if (videoTimeUs < mSeekTimeUs) {
852                    // buffers are before seek time
853                    // ignore them
854                    mVideoBuffer->release();
855                    mVideoBuffer = NULL;
856                    continue;
857                }
858            } else {
859                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
860                    // Frames are before the begin cut time;
861                    // do not render them
862                    mVideoBuffer->release();
863                    mVideoBuffer = NULL;
864                    continue;
865                }
866            }
867            break;
868        }
869    }
870
871    mNumberDecVideoFrames++;
872
873    int64_t timeUs;
874    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
875
876    {
877        Mutex::Autolock autoLock(mMiscStateLock);
878        mVideoTimeUs = timeUs;
879    }
880
881
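    // 0xAAAAAAAA is a private notification code used to tell the listener to
    // start preparing the player for the next clip.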
882    if(!mStartNextPlayer) {
883        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
884        if(playbackTimeRemaining <= 1500000) {
885            //When less than 1.5 sec of playback left
886            // send notification to start next player
887
888            mStartNextPlayer = true;
889            notifyListener_l(0xAAAAAAAA);
890        }
891    }
892
893    bool wasSeeking = mSeeking;
894    finishSeekIfNecessary(timeUs);
895    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
896        status_t err = startAudioPlayer_l();
897        if (err != OK) {
898            LOGE("Starting the audio player failed w/ err %d", err);
899            return;
900        }
901    }
902
903    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
904
905    if(ts == NULL) {
906        mVideoBuffer->release();
907        mVideoBuffer = NULL;
908        return;
909    }
910
911    if(!mIsVideoSourceJpg) {
912        if (mFlags & FIRST_FRAME) {
913            mFlags &= ~FIRST_FRAME;
914
915            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
916        }
917
918        int64_t realTimeUs, mediaTimeUs;
919        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
920            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
921            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
922        }
923
924        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
925
926        int64_t latenessUs = nowUs - timeUs;
927
928        if (wasSeeking) {
929            // Let's display the first frame after seeking right away.
930            latenessUs = 0;
931        }
932        LOGV("Audio time stamp = %lld and video time stamp = %lld",
933                                            ts->getRealTimeUs(),timeUs);
934        if (latenessUs > 40000) {
935            // We're more than 40ms late.
936
937            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
938                                           latenessUs, latenessUs / 1E6);
939
940            mVideoBuffer->release();
941            mVideoBuffer = NULL;
942            postVideoEvent_l(0);
943            return;
944        }
945
946        if (latenessUs < -25000) {
947            // We're more than 25ms early.
948            LOGV("We're more than 25ms early, lateness %lld", latenessUs);
949
950            postVideoEvent_l(25000);
951            return;
952        }
953    }
954
955    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
956        mVideoRendererIsPreview = false;
957
958        status_t err = initRenderer_l();
959        if (err != OK) {
960            postStreamDoneEvent_l(err);
961        }
962    }
963
964    // If the timestamp exceeds the clip's end cut time, do not render
965    if((timeUs/1000) > mPlayEndTimeMsec) {
966        if (mLastVideoBuffer) {
967            mLastVideoBuffer->release();
968            mLastVideoBuffer = NULL;
969        }
970        mLastVideoBuffer = mVideoBuffer;
971        mVideoBuffer = NULL;
972        mFlags |= VIDEO_AT_EOS;
973        mFlags |= AUDIO_AT_EOS;
974        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
975        mOverlayUpdateEventPosted = false;
976        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
977        return;
978    }
979    // Capture the frame timestamp to be rendered
980    mDecodedVideoTs = timeUs;
981
982    // Post processing to apply video effects
983    for(i=0;i<mNumberEffects;i++) {
984        // First check if the effect's start time falls within the clip being previewed
985        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
986        (mEffectsSettings[i].uiStartTime >=
987         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
988        {
989            // This effect doesn't belong to this clip, check next one
990            continue;
991        }
992        // Check if effect applies to this particular frame timestamp
993        if((mEffectsSettings[i].uiStartTime <=
994         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
995            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
996             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
997              && (mEffectsSettings[i].uiDuration != 0)) {
998            setVideoPostProcessingNode(
999             mEffectsSettings[i].VideoEffectType, TRUE);
1000        }
1001        else {
1002            setVideoPostProcessingNode(
1003             mEffectsSettings[i].VideoEffectType, FALSE);
1004        }
1005    }
1006
1007    //Provide the overlay Update indication when there is an overlay effect
1008    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
1009        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
1010        if (!mOverlayUpdateEventPosted) {
1011            // Find the effect in effectSettings array
1012            int index;
1013            for (index = 0; index < mNumberEffects; index++) {
1014                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
1015                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
1016                if(mEffectsSettings[index].VideoEffectType ==
1017                    M4xVSS_kVideoEffectType_Framing) {
1018                    if (((mEffectsSettings[index].uiStartTime + 1) <=
1019                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
1020                        ((mEffectsSettings[index].uiStartTime - 1 +
1021                        mEffectsSettings[index].uiDuration) >=
1022                        timeMs + timeOffset - mPlayBeginTimeMsec))
1023                    {
1024                        break;
1025                    }
1026                }
1027            }
1028            if (index < mNumberEffects) {
1029                mCurrFramingEffectIndex = index;
1030                mOverlayUpdateEventPosted = true;
1031                postOverlayUpdateEvent_l();
1032                LOGV("Framing index = %d", mCurrFramingEffectIndex);
1033            } else {
1034                LOGV("No framing effects found");
1035            }
1036        }
1037
1038    } else if (mOverlayUpdateEventPosted) {
1039        // Post the event when the overlay is no longer valid
1040        LOGV("Overlay is Done");
1041        mOverlayUpdateEventPosted = false;
1042        postOverlayUpdateEvent_l();
1043    }
1044
1045
1046    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
1047        err1 = doVideoPostProcessing();
1048        if(err1 != M4NO_ERROR) {
1049            LOGE("doVideoPostProcessing returned err 0x%x", err1);
1050            bAppliedVideoEffect = false;
1051        }
1052        else {
1053            bAppliedVideoEffect = true;
1054        }
1055    }
1056    else {
1057        bAppliedVideoEffect = false;
1058        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
1059            // No effects to be applied, but media rendering to be done
1060            err1 = doMediaRendering();
1061            if(err1 != M4NO_ERROR) {
1062                LOGE("doMediaRendering returned err 0x%x", err1);
1063                //Use original mVideoBuffer for rendering
1064                mVideoResizedOrCropped = false;
1065            }
1066        }
1067    }
1068
1069    if (mVideoRenderer != NULL) {
1070        LOGV("mVideoRenderer CALL render()");
1071        mVideoRenderer->render();
1072    }
1073
1074    if (mLastVideoBuffer) {
1075        mLastVideoBuffer->release();
1076        mLastVideoBuffer = NULL;
1077    }
1078
1079    mLastVideoBuffer = mVideoBuffer;
1080    mVideoBuffer = NULL;
1081
1082    // Post progress callback based on callback interval set
1083    if(mNumberDecVideoFrames >= mProgressCbInterval) {
1084        postProgressCallbackEvent_l();
1085        mNumberDecVideoFrames = 0;  // reset counter
1086    }
1087
1088    // if reached EndCutTime of clip, post EOS event
1089    if((timeUs/1000) >= mPlayEndTimeMsec) {
1090        LOGV("PreviewPlayer: onVideoEvent EOS.");
1091        mFlags |= VIDEO_AT_EOS;
1092        mFlags |= AUDIO_AT_EOS;
1093        mOverlayUpdateEventPosted = false;
1094        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
1095    }
1096    else {
1097        if(!mIsVideoSourceJpg) {
1098            postVideoEvent_l(0);
1099        }
1100        else {
1101            postVideoEvent_l(33000);
1102        }
1103    }
1104}
1105
1106status_t PreviewPlayer::prepare() {
1107    Mutex::Autolock autoLock(mLock);
1108    return prepare_l();
1109}
1110
1111status_t PreviewPlayer::prepare_l() {
1112    if (mFlags & PREPARED) {
1113        return OK;
1114    }
1115
1116    if (mFlags & PREPARING) {
1117        return UNKNOWN_ERROR;
1118    }
1119
1120    mIsAsyncPrepare = false;
1121    status_t err = prepareAsync_l();
1122
1123    if (err != OK) {
1124        return err;
1125    }
1126
1127    while (mFlags & PREPARING) {
1128        mPreparedCondition.wait(mLock);
1129    }
1130
1131    return mPrepareResult;
1132}
1133
1134status_t PreviewPlayer::prepareAsync_l() {
1135    if (mFlags & PREPARING) {
1136        return UNKNOWN_ERROR;  // async prepare already pending
1137    }
1138
1139    if (!mQueueStarted) {
1140        mQueue.start();
1141        mQueueStarted = true;
1142    }
1143
1144    mFlags |= PREPARING;
1145    mAsyncPrepareEvent = new PreviewPlayerEvent(
1146            this, &PreviewPlayer::onPrepareAsyncEvent);
1147
1148    mQueue.postEvent(mAsyncPrepareEvent);
1149
1150    return OK;
1151}
1152
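// Deferred part of setDataSource(): creates the DataSource for the stored URI
// and an MPEG-4 extractor on top of it. Image clips (.rgb files) skip the
// extractor and are handled by setDataSource_l_jpg() instead.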
1153status_t PreviewPlayer::finishSetDataSource_l() {
1154    sp<DataSource> dataSource;
1155    sp<MediaExtractor> extractor;
1156
1157    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
1158
1159    if (dataSource == NULL) {
1160        return UNKNOWN_ERROR;
1161    }
1162
1163    // If the file type is .rgb (image clip), no extractor is needed
1164    int uriLen = strlen(mUri);
1165    int startOffset = uriLen - 4;
1166    if (uriLen >= 4 && !strncasecmp(mUri+startOffset, ".rgb", 4)) {
1167        extractor = NULL;
1168    }
1169    else {
1170        extractor = MediaExtractor::Create(dataSource,
1171                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
1172    }
1173
1174    if (extractor == NULL) {
1175        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
1176        return setDataSource_l_jpg();
1177    }
1178
1179    return setDataSource_l(extractor);
1180}
1181
1182
1183// static
1184bool PreviewPlayer::ContinuePreparation(void *cookie) {
1185    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
1186
1187    return (me->mFlags & PREPARE_CANCELLED) == 0;
1188}
1189
1190void PreviewPlayer::onPrepareAsyncEvent() {
1191    Mutex::Autolock autoLock(mLock);
1192    LOGV("onPrepareAsyncEvent");
1193
1194    if (mFlags & PREPARE_CANCELLED) {
1195        LOGV("LV PLAYER prepare was cancelled before doing anything");
1196        abortPrepare(UNKNOWN_ERROR);
1197        return;
1198    }
1199
1200    if (mUri.size() > 0) {
1201        status_t err = finishSetDataSource_l();
1202
1203        if (err != OK) {
1204            abortPrepare(err);
1205            return;
1206        }
1207    }
1208
1209    if (mVideoTrack != NULL && mVideoSource == NULL) {
1210        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
1211
1212        if (err != OK) {
1213            abortPrepare(err);
1214            return;
1215        }
1216    }
1217
1218    if (mAudioTrack != NULL && mAudioSource == NULL) {
1219        status_t err = initAudioDecoder();
1220
1221        if (err != OK) {
1222            abortPrepare(err);
1223            return;
1224        }
1225    }
1226    finishAsyncPrepare_l();
1227
1228}
1229
1230void PreviewPlayer::finishAsyncPrepare_l() {
1231    if (mIsAsyncPrepare) {
1232        if (mVideoSource == NULL) {
1233            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
1234            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
1235        } else {
1236            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
1237            notifyVideoSize_l();
1238        }
1239        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
1240        notifyListener_l(MEDIA_PREPARED);
1241    }
1242
1243    mPrepareResult = OK;
1244    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
1245    mFlags |= PREPARED;
1246    mAsyncPrepareEvent = NULL;
1247    mPreparedCondition.broadcast();
1248}
1249
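// Snapshot the current playback state (URI, flags, position and, when
// accessible, a copy of the last decoded frame) so that resume() can restore
// and redisplay it, then release all resources via reset_l().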
1250status_t PreviewPlayer::suspend() {
1251    LOGV("suspend");
1252    Mutex::Autolock autoLock(mLock);
1253
1254    if (mSuspensionState != NULL) {
1255        if (mLastVideoBuffer == NULL) {
1256            // We get here if the player is suspended again after a
1257            // resume() without any playback having happened in
1258            // between.
1259            SuspensionState *state = mSuspensionState;
1260            mSuspensionState = NULL;
1261            reset_l();
1262            mSuspensionState = state;
1263            return OK;
1264        }
1265
1266        delete mSuspensionState;
1267        mSuspensionState = NULL;
1268    }
1269
1270    if (mFlags & PREPARING) {
1271        mFlags |= PREPARE_CANCELLED;
1272    }
1273
1274    while (mFlags & PREPARING) {
1275        mPreparedCondition.wait(mLock);
1276    }
1277
1278    SuspensionState *state = new SuspensionState;
1279    state->mUri = mUri;
1280    state->mUriHeaders = mUriHeaders;
1281    state->mFileSource = mFileSource;
1282
1283    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
1284    getPosition(&state->mPositionUs);
1285
1286    if (mLastVideoBuffer) {
1287        size_t size = mLastVideoBuffer->range_length();
1288        if (size) {
1289            int32_t unreadable;
1290            if (!mLastVideoBuffer->meta_data()->findInt32(
1291                        kKeyIsUnreadable, &unreadable)
1292                    || unreadable == 0) {
1293                state->mLastVideoFrameSize = size;
1294                state->mLastVideoFrame = malloc(size);
1295                memcpy(state->mLastVideoFrame,
1296                   (const uint8_t *)mLastVideoBuffer->data()
1297                        + mLastVideoBuffer->range_offset(),
1298                   size);
1299
1300                state->mVideoWidth = mVideoWidth;
1301                state->mVideoHeight = mVideoHeight;
1302
1303                sp<MetaData> meta = mVideoSource->getFormat();
1304                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
1305                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
1306                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
1307            } else {
1308                LOGV("Unable to save last video frame, we have no access to "
1309                     "the decoded video data.");
1310            }
1311        }
1312    }
1313
1314    reset_l();
1315
1316    mSuspensionState = state;
1317
1318    return OK;
1319}
1320
1321status_t PreviewPlayer::resume() {
1322    LOGV("resume");
1323    Mutex::Autolock autoLock(mLock);
1324
1325    if (mSuspensionState == NULL) {
1326        return INVALID_OPERATION;
1327    }
1328
1329    SuspensionState *state = mSuspensionState;
1330    mSuspensionState = NULL;
1331
1332    status_t err;
1333    if (state->mFileSource != NULL) {
1334        err = AwesomePlayer::setDataSource_l(state->mFileSource);
1335
1336        if (err == OK) {
1337            mFileSource = state->mFileSource;
1338        }
1339    } else {
1340        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
1341    }
1342
1343    if (err != OK) {
1344        delete state;
1345        state = NULL;
1346
1347        return err;
1348    }
1349
1350    seekTo_l(state->mPositionUs);
1351
1352    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
1353
1354    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
1355        mVideoRenderer =
1356            PreviewLocalRenderer::initPreviewLocalRenderer(
1357                    true,  // previewOnly
1358                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
1359                    mSurface,
1360                    state->mVideoWidth,
1361                    state->mVideoHeight,
1362                    state->mDecodedWidth,
1363                    state->mDecodedHeight);
1364
1365        mVideoRendererIsPreview = true;
1366
1367        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
1368                state->mLastVideoFrame, state->mLastVideoFrameSize);
1369    }
1370
1371    if (state->mFlags & PLAYING) {
1372        play_l();
1373    }
1374
1375    mSuspensionState = state;
1376    state = NULL;
1377
1378    return OK;
1379}
1380
1381
1382status_t PreviewPlayer::loadEffectsSettings(
1383                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
1384    M4OSA_UInt32 i = 0, rgbSize = 0;
1385    M4VIFI_UInt8 *tmp = M4OSA_NULL;
1386
1387    mNumberEffects = nEffects;
1388    mEffectsSettings = pEffectSettings;
1389    return OK;
1390}
1391
1392status_t PreviewPlayer::loadAudioMixSettings(
1393                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
1394
1395    LOGV("PreviewPlayer: loadAudioMixSettings: ");
1396    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
1397    return OK;
1398}
1399
1400status_t PreviewPlayer::setAudioMixPCMFileHandle(
1401                    M4OSA_Context pAudioMixPCMFileHandle) {
1402
1403    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
1404    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
1405    return OK;
1406}
1407
1408status_t PreviewPlayer::setAudioMixStoryBoardParam(
1409                    M4OSA_UInt32 audioMixStoryBoardTS,
1410                    M4OSA_UInt32 currentMediaBeginCutTime,
1411                    M4OSA_UInt32 primaryTrackVolValue ) {
1412
1413    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
1414    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
1415    mCurrentMediaVolumeValue = primaryTrackVolValue;
1416    return OK;
1417}
1418
1419status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
1420
1421    mPlayBeginTimeMsec = msec;
1422    return OK;
1423}
1424
1425status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
1426
1427    mPlayEndTimeMsec = msec;
1428    return OK;
1429}
1430
1431status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
1432
1433    mStoryboardStartTimeMsec = msec;
1434    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1435    return OK;
1436}
1437
1438status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
1439
1440    mProgressCbInterval = cbInterval;
1441    return OK;
1442}
1443
1444
1445status_t PreviewPlayer::setMediaRenderingMode(
1446        M4xVSS_MediaRendering mode,
1447        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
1448
1449    mRenderingMode = mode;
1450
1451    /* reset boolean for each clip*/
1452    mVideoResizedOrCropped = false;
1453
1454    switch(outputVideoSize) {
1455        case M4VIDEOEDITING_kSQCIF:
1456            mOutputVideoWidth = 128;
1457            mOutputVideoHeight = 96;
1458            break;
1459
1460        case M4VIDEOEDITING_kQQVGA:
1461            mOutputVideoWidth = 160;
1462            mOutputVideoHeight = 120;
1463            break;
1464
1465        case M4VIDEOEDITING_kQCIF:
1466            mOutputVideoWidth = 176;
1467            mOutputVideoHeight = 144;
1468            break;
1469
1470        case M4VIDEOEDITING_kQVGA:
1471            mOutputVideoWidth = 320;
1472            mOutputVideoHeight = 240;
1473            break;
1474
1475        case M4VIDEOEDITING_kCIF:
1476            mOutputVideoWidth = 352;
1477            mOutputVideoHeight = 288;
1478            break;
1479
1480        case M4VIDEOEDITING_kVGA:
1481            mOutputVideoWidth = 640;
1482            mOutputVideoHeight = 480;
1483            break;
1484
1485        case M4VIDEOEDITING_kWVGA:
1486            mOutputVideoWidth = 800;
1487            mOutputVideoHeight = 480;
1488            break;
1489
1490        case M4VIDEOEDITING_kNTSC:
1491            mOutputVideoWidth = 720;
1492            mOutputVideoHeight = 480;
1493            break;
1494
1495        case M4VIDEOEDITING_k640_360:
1496            mOutputVideoWidth = 640;
1497            mOutputVideoHeight = 360;
1498            break;
1499
1500        case M4VIDEOEDITING_k854_480:
1501            mOutputVideoWidth = 854;
1502            mOutputVideoHeight = 480;
1503            break;
1504
1505        case M4VIDEOEDITING_kHD1280:
1506            mOutputVideoWidth = 1280;
1507            mOutputVideoHeight = 720;
1508            break;
1509
1510        case M4VIDEOEDITING_kHD1080:
1511            mOutputVideoWidth = 1080;
1512            mOutputVideoHeight = 720;
1513            break;
1514
1515        case M4VIDEOEDITING_kHD960:
1516            mOutputVideoWidth = 960;
1517            mOutputVideoHeight = 720;
1518            break;
1519
1520        default:
1521            LOGE("unsupported output video size set");
1522            return BAD_VALUE;
1523    }
1524
1525    return OK;
1526}
1527
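// Convert the decoded YUV420 planar frame into the renderer's YV12 output
// buffer, applying the configured rendering mode (resizing, cropping or
// black borders) for the target output size.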
1528M4OSA_ERR PreviewPlayer::doMediaRendering() {
1529    M4OSA_ERR err = M4NO_ERROR;
1530    M4VIFI_ImagePlane planeIn[3], planeOut[3];
1531    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
1532    M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
1533    size_t videoBufferSize = 0;
1534    M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0;
1535    int32_t colorFormat = 0;
1536
1537    if(!mIsVideoSourceJpg) {
1538        sp<MetaData> meta = mVideoSource->getFormat();
1539        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1540    }
1541    else {
1542        colorFormat = OMX_COLOR_FormatYUV420Planar;
1543    }
1544
1545    videoBufferSize = mVideoBuffer->size();
1546    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
1547
1548    uint8_t* outBuffer;
1549    size_t outBufferStride = 0;
1550
1551    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
1552
1553    bufferOffset = index*frameSize;
1554    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
1555                mVideoBuffer->range_offset()+bufferOffset;
1556
1557
1558    /* In plane*/
1559    prepareYUV420ImagePlane(planeIn, mVideoWidth,
1560      mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
1561
1562    // Set the output YUV420 plane to be compatible with YV12 format
1563    // W & H even
1564    // YVU instead of YUV
1565    // align buffers on 32 bits
1566
1567    //In YV12 format, sizes must be even
1568    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
1569    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
1570
1571    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
1572     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
1573
1574
1575    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
1576
1577    if(err != M4NO_ERROR)
1578    {
1579        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
1580        return err;
1581    }
1582    mVideoResizedOrCropped = true;
1583
1584    return err;
1585}
1586
1587status_t PreviewPlayer::resetJniCallbackTimeStamp() {
1588
1589    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1590    return OK;
1591}
1592
1593void PreviewPlayer::postProgressCallbackEvent_l() {
1594    if (mProgressCbEventPending) {
1595        return;
1596    }
1597    mProgressCbEventPending = true;
1598
1599    mQueue.postEvent(mProgressCbEvent);
1600}
1601
1602
1603void PreviewPlayer::onProgressCbEvent() {
1604    Mutex::Autolock autoLock(mLock);
1605    if (!mProgressCbEventPending) {
1606        return;
1607    }
1608    mProgressCbEventPending = false;
1609    // If playback starts from previous I-frame,
1610    // then send frame storyboard duration
1611    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
1612        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
1613    }
1614    else {
1615        notifyListener_l(MEDIA_INFO, 0,
1616        (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
1617    }
1618}
1619
1620void PreviewPlayer::postOverlayUpdateEvent_l() {
1621    if (mOverlayUpdateEventPending) {
1622        return;
1623    }
1624    mOverlayUpdateEventPending = true;
1625    mQueue.postEvent(mOverlayUpdateEvent);
1626}
1627
1628void PreviewPlayer::onUpdateOverlayEvent() {
1629    Mutex::Autolock autoLock(mLock);
1630
1631    if (!mOverlayUpdateEventPending) {
1632        return;
1633    }
1634    mOverlayUpdateEventPending = false;
1635
1636    int updateState;
1637    if (mOverlayUpdateEventPosted) {
1638        updateState = 1;
1639    } else {
1640        updateState = 0;
1641    }
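    // 0xBBBBBBBB is a private notification code for overlay (framing effect)
    // updates: updateState is 1 when the overlay identified by
    // mCurrFramingEffectIndex should be drawn and 0 when it should be cleared.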
1642    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
1643}
1644
1645
1646void PreviewPlayer::setVideoPostProcessingNode(
1647                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
1648
1649    uint32_t effect = VIDEO_EFFECT_NONE;
1650
1651    //Map M4VSS3GPP_VideoEffectType to local enum
1652    switch(type) {
1653        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1654            effect = VIDEO_EFFECT_FADEFROMBLACK;
1655            break;
1656
1657        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1658            effect = VIDEO_EFFECT_FADETOBLACK;
1659            break;
1660
1661        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1662            effect = VIDEO_EFFECT_CURTAINOPEN;
1663            break;
1664
1665        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1666            effect = VIDEO_EFFECT_CURTAINCLOSE;
1667            break;
1668
1669        case M4xVSS_kVideoEffectType_BlackAndWhite:
1670            effect = VIDEO_EFFECT_BLACKANDWHITE;
1671            break;
1672
1673        case M4xVSS_kVideoEffectType_Pink:
1674            effect = VIDEO_EFFECT_PINK;
1675            break;
1676
1677        case M4xVSS_kVideoEffectType_Green:
1678            effect = VIDEO_EFFECT_GREEN;
1679            break;
1680
1681        case M4xVSS_kVideoEffectType_Sepia:
1682            effect = VIDEO_EFFECT_SEPIA;
1683            break;
1684
1685        case M4xVSS_kVideoEffectType_Negative:
1686            effect = VIDEO_EFFECT_NEGATIVE;
1687            break;
1688
1689        case M4xVSS_kVideoEffectType_Framing:
1690            effect = VIDEO_EFFECT_FRAMING;
1691            break;
1692
1693        case M4xVSS_kVideoEffectType_Fifties:
1694            effect = VIDEO_EFFECT_FIFTIES;
1695            break;
1696
1697        case M4xVSS_kVideoEffectType_ColorRGB16:
1698            effect = VIDEO_EFFECT_COLOR_RGB16;
1699            break;
1700
1701        case M4xVSS_kVideoEffectType_Gradient:
1702            effect = VIDEO_EFFECT_GRADIENT;
1703            break;
1704
1705        default:
1706            effect = VIDEO_EFFECT_NONE;
1707            break;
1708    }
1709
1710    if(enable == M4OSA_TRUE) {
1711        //If already set, then no need to set again
1712        if(!(mCurrentVideoEffect & effect)) {
1713            mCurrentVideoEffect |= effect;
1714            if(effect == VIDEO_EFFECT_FIFTIES) {
1715                mIsFiftiesEffectStarted = true;
1716            }
1717        }
1718    }
1719    else  {
1720        //Reset only if already set
1721        if(mCurrentVideoEffect & effect) {
1722            mCurrentVideoEffect &= ~effect;
1723        }
1724    }
1725}
1726
1727status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
1728    mVideoWidth = width;
1729    mVideoHeight = height;
1730    return OK;
1731}
1732
1733
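// Apply the currently active video effects (and the rendering mode) to the
// decoded frame by filling a vePostProcessParams structure and handing it to
// applyEffectsAndRenderingMode(), which writes into the renderer's buffer.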
1734M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
1735    M4OSA_ERR err = M4NO_ERROR;
1736    vePostProcessParams postProcessParams;
1737    int32_t colorFormat = 0;
1738
1739
1740    if(!mIsVideoSourceJpg) {
1741        sp<MetaData> meta = mVideoSource->getFormat();
1742        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1743    }
1744    else {
1745        colorFormat = OMX_COLOR_FormatYUV420Planar;
1746    }
1747
1748    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
1749       (colorFormat == 0x7FA30C00)) { // vendor-specific YUV420SP variant
1750          LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
1751          return M4ERR_UNSUPPORTED_MEDIA_TYPE;
1752    }
1753
1754    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
1755        + mVideoBuffer->range_offset();
1756
1757    postProcessParams.videoWidth = mVideoWidth;
1758    postProcessParams.videoHeight = mVideoHeight;
1759    postProcessParams.timeMs = mDecodedVideoTs/1000;
1760    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
1761    postProcessParams.effectsSettings = mEffectsSettings;
1762    postProcessParams.numberEffects = mNumberEffects;
1763    postProcessParams.outVideoWidth = mOutputVideoWidth;
1764    postProcessParams.outVideoHeight = mOutputVideoHeight;
1765    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
1766    postProcessParams.renderingMode = mRenderingMode;
1767    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
1768        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
1769        mIsFiftiesEffectStarted = M4OSA_FALSE;
1770    }
1771    else {
1772       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
1773    }
1774
1775    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
1776    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
1777    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
1778    err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
1779
1780    return err;
1781}
1782
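// Decode frames until the first displayable one is found (honoring a pending
// seek and the clip's begin cut time) and keep it in mVideoBuffer, updating
// mDecodedVideoTs, so the caller can render it before playback starts.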
1783status_t PreviewPlayer::readFirstVideoFrame() {
1784    LOGV("PreviewPlayer::readFirstVideoFrame");
1785
1786    if (!mVideoBuffer) {
1787        MediaSource::ReadOptions options;
1788        if (mSeeking) {
1789            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
1790                    mSeekTimeUs / 1E6);
1791
1792            options.setSeekTo(
1793                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
1794        }
1795        for (;;) {
1796            status_t err = mVideoSource->read(&mVideoBuffer, &options);
1797            options.clearSeekTo();
1798
1799            if (err != OK) {
1800                CHECK_EQ(mVideoBuffer, NULL);
1801
1802                if (err == INFO_FORMAT_CHANGED) {
1803                    LOGV("LV PLAYER VideoSource signalled format change");
1804                    notifyVideoSize_l();
1805                    sp<MetaData> meta = mVideoSource->getFormat();
1806
1807                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
1808                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
1809
1810                    if (mVideoRenderer != NULL) {
1811                        mVideoRendererIsPreview = false;
1812                        err = initRenderer_l();
1813                        if (err != OK) {
1814                            postStreamDoneEvent_l(err);
1815                        }
1816                    }
1817                    continue;
1818                }
1819                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
1820                mFlags |= VIDEO_AT_EOS;
1821                postStreamDoneEvent_l(err);
1822                return OK;
1823            }
1824
1825            if (mVideoBuffer->range_length() == 0) {
1826                // Some decoders, notably the PV AVC software decoder
1827                // return spurious empty buffers that we just want to ignore.
1828
1829                mVideoBuffer->release();
1830                mVideoBuffer = NULL;
1831                continue;
1832            }
1833
1834            int64_t videoTimeUs;
1835            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
1836            if (mSeeking) {
1837                if (videoTimeUs < mSeekTimeUs) {
1838                    // buffers are before seek time
1839                    // ignore them
1840                    mVideoBuffer->release();
1841                    mVideoBuffer = NULL;
1842                    continue;
1843                }
1844            } else {
1845                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
1846                    // buffers are before begin cut time
1847                    // ignore them
1848                    mVideoBuffer->release();
1849                    mVideoBuffer = NULL;
1850                    continue;
1851                }
1852            }
1853            break;
1854        }
1855    }
1856
1857    int64_t timeUs;
1858    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
1859
1860    {
1861        Mutex::Autolock autoLock(mMiscStateLock);
1862        mVideoTimeUs = timeUs;
1863    }
1864
1865    mDecodedVideoTs = timeUs;
1866
1867    return OK;
1868
1869}
1870
1871status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
1872    *lastRenderedTimeMs = (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec);
1873    return OK;
1874}
1875
1876}  // namespace android
1877