PreviewPlayer.cpp revision fa31daff175709d2c58d61d1354daa9495696631
1/*
2 * Copyright (C) 2011 NXP Software
3 * Copyright (C) 2011 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18
19#define LOG_NDEBUG 1
20#define LOG_TAG "PreviewPlayer"
21#include <utils/Log.h>
22
23#include <dlfcn.h>
24
25#include "include/ARTSPController.h"
26#include "PreviewPlayer.h"
27#include "DummyAudioSource.h"
28#include "DummyVideoSource.h"
29#include "VideoEditorSRC.h"
30#include "include/NuCachedSource2.h"
31#include "include/ThrottledSource.h"
32
33
34#include "PreviewRenderer.h"
35
36#include <binder/IPCThreadState.h>
37#include <media/stagefright/DataSource.h>
38#include <media/stagefright/FileSource.h>
39#include <media/stagefright/MediaBuffer.h>
40#include <media/stagefright/MediaDefs.h>
41#include <media/stagefright/MediaExtractor.h>
42#include <media/stagefright/MediaDebug.h>
43#include <media/stagefright/MediaSource.h>
44#include <media/stagefright/MetaData.h>
45#include <media/stagefright/OMXCodec.h>
46
47#include <surfaceflinger/Surface.h>
48#include <media/stagefright/foundation/ALooper.h>
49
50namespace android {
51
52
53struct PreviewPlayerEvent : public TimedEventQueue::Event {
54    PreviewPlayerEvent(
55            PreviewPlayer *player,
56            void (PreviewPlayer::*method)())
57        : mPlayer(player),
58          mMethod(method) {
59    }
60
61protected:
62    virtual ~PreviewPlayerEvent() {}
63
64    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
65        (mPlayer->*mMethod)();
66    }
67
68private:
69    PreviewPlayer *mPlayer;
70    void (PreviewPlayer::*mMethod)();
71
72    PreviewPlayerEvent(const PreviewPlayerEvent &);
73    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
74};
75
76
77struct PreviewLocalRenderer : public PreviewPlayerRenderer {
78
79    static PreviewLocalRenderer* initPreviewLocalRenderer (
80            bool previewOnly,
81            OMX_COLOR_FORMATTYPE colorFormat,
82            const sp<Surface> &surface,
83            size_t displayWidth, size_t displayHeight,
84            size_t decodedWidth, size_t decodedHeight,
85            int32_t rotationDegrees = 0)
86    {
87        PreviewLocalRenderer* mLocalRenderer = new
88            PreviewLocalRenderer(
89                previewOnly,
90                colorFormat,
91                surface,
92                displayWidth, displayHeight,
93                decodedWidth, decodedHeight,
94                rotationDegrees);
95
96        if ( mLocalRenderer->init(previewOnly,
97                 colorFormat, surface,
98                 displayWidth, displayHeight,
99                 decodedWidth, decodedHeight,
100                 rotationDegrees) != OK )
101        {
102            delete mLocalRenderer;
103            return NULL;
104        }
105        return mLocalRenderer;
106    }
107
108    virtual void render(MediaBuffer *buffer) {
109        render((const uint8_t *)buffer->data() + buffer->range_offset(),
110               buffer->range_length());
111    }
112
113    void render(const void *data, size_t size) {
114        mTarget->render(data, size, NULL);
115    }
116    void render() {
117        mTarget->renderYV12();
118    }
119    void getBuffer(uint8_t **data, size_t *stride) {
120        mTarget->getBufferYV12(data, stride);
121    }
122
123protected:
124    virtual ~PreviewLocalRenderer() {
125        delete mTarget;
126        mTarget = NULL;
127    }
128
129private:
130    PreviewRenderer *mTarget;
131
132    PreviewLocalRenderer(
133            bool previewOnly,
134            OMX_COLOR_FORMATTYPE colorFormat,
135            const sp<Surface> &surface,
136            size_t displayWidth, size_t displayHeight,
137            size_t decodedWidth, size_t decodedHeight,
138            int32_t rotationDegrees = 0)
139        : mTarget(NULL) {
140    }
141
142
143    int init(
144            bool previewOnly,
145            OMX_COLOR_FORMATTYPE colorFormat,
146            const sp<Surface> &surface,
147            size_t displayWidth, size_t displayHeight,
148            size_t decodedWidth, size_t decodedHeight,
149            int32_t rotationDegrees = 0);
150
151    PreviewLocalRenderer(const PreviewLocalRenderer &);
152    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);;
153};
154
155int PreviewLocalRenderer::init(
156        bool previewOnly,
157        OMX_COLOR_FORMATTYPE colorFormat,
158        const sp<Surface> &surface,
159        size_t displayWidth, size_t displayHeight,
160        size_t decodedWidth, size_t decodedHeight,
161        int32_t rotationDegrees) {
162
163    mTarget = PreviewRenderer::CreatePreviewRenderer (
164            colorFormat, surface, displayWidth, displayHeight,
165            decodedWidth, decodedHeight, rotationDegrees);
166    if (mTarget == M4OSA_NULL) {
167        return UNKNOWN_ERROR;
168    }
169    return OK;
170}
171
172PreviewPlayer::PreviewPlayer()
173    : AwesomePlayer(),
174      mCurrFramingEffectIndex(0)   ,
175      mReportedWidth(0),
176      mReportedHeight(0),
177      mFrameRGBBuffer(NULL),
178      mFrameYUVBuffer(NULL){
179
180    mVideoRenderer = NULL;
181    mLastVideoBuffer = NULL;
182    mSuspensionState = NULL;
183    mEffectsSettings = NULL;
184    mVeAudioPlayer = NULL;
185    mAudioMixStoryBoardTS = 0;
186    mCurrentMediaBeginCutTime = 0;
187    mCurrentMediaVolumeValue = 0;
188    mNumberEffects = 0;
189    mDecodedVideoTs = 0;
190    mDecVideoTsStoryBoard = 0;
191    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
192    mProgressCbInterval = 0;
193    mNumberDecVideoFrames = 0;
194    mOverlayUpdateEventPosted = false;
195    mIsChangeSourceRequired = true;
196
197    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
198    mVideoEventPending = false;
199    mStreamDoneEvent = new PreviewPlayerEvent(this,
200         &PreviewPlayer::onStreamDone);
201
202    mStreamDoneEventPending = false;
203
204    mCheckAudioStatusEvent = new PreviewPlayerEvent(
205        this, &AwesomePlayer::onCheckAudioStatus);
206
207    mAudioStatusEventPending = false;
208
209    mProgressCbEvent = new PreviewPlayerEvent(this,
210         &PreviewPlayer::onProgressCbEvent);
211
212    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
213        &PreviewPlayer::onUpdateOverlayEvent);
214    mProgressCbEventPending = false;
215
216    mOverlayUpdateEventPending = false;
217    mResizedVideoBuffer = NULL;
218    mVideoResizedOrCropped = false;
219    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
220    mIsFiftiesEffectStarted = false;
221    reset();
222}
223
224PreviewPlayer::~PreviewPlayer() {
225
226    if (mQueueStarted) {
227        mQueue.stop();
228    }
229
230    reset();
231
232    if(mResizedVideoBuffer != NULL) {
233        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
234        mResizedVideoBuffer = NULL;
235    }
236
237    mVideoRenderer.clear();
238    mVideoRenderer = NULL;
239}
240
241void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
242    mQueue.cancelEvent(mVideoEvent->eventID());
243    mVideoEventPending = false;
244    mQueue.cancelEvent(mStreamDoneEvent->eventID());
245    mStreamDoneEventPending = false;
246    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
247    mAudioStatusEventPending = false;
248
249    mQueue.cancelEvent(mProgressCbEvent->eventID());
250    mProgressCbEventPending = false;
251}
252
253status_t PreviewPlayer::setDataSource(
254        const char *uri, const KeyedVector<String8, String8> *headers) {
255    Mutex::Autolock autoLock(mLock);
256    return setDataSource_l(uri, headers);
257}
258
259status_t PreviewPlayer::setDataSource_l(
260        const char *uri, const KeyedVector<String8, String8> *headers) {
261    reset_l();
262
263    mUri = uri;
264
265    if (headers) {
266        mUriHeaders = *headers;
267    }
268
269    // The actual work will be done during preparation in the call to
270    // ::finishSetDataSource_l to avoid blocking the calling thread in
271    // setDataSource for any significant time.
272    return OK;
273}
274
275status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
276    bool haveAudio = false;
277    bool haveVideo = false;
278    for (size_t i = 0; i < extractor->countTracks(); ++i) {
279        sp<MetaData> meta = extractor->getTrackMetaData(i);
280
281        const char *mime;
282        CHECK(meta->findCString(kKeyMIMEType, &mime));
283
284        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
285            setVideoSource(extractor->getTrack(i));
286            haveVideo = true;
287        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
288            setAudioSource(extractor->getTrack(i));
289            haveAudio = true;
290
291            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
292                // Only do this for vorbis audio, none of the other audio
293                // formats even support this ringtone specific hack and
294                // retrieving the metadata on some extractors may turn out
295                // to be very expensive.
296                sp<MetaData> fileMeta = extractor->getMetaData();
297                int32_t loop;
298                if (fileMeta != NULL
299                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
300                         && loop != 0) {
301                    mFlags |= AUTO_LOOPING;
302                }
303            }
304        }
305
306        if (haveAudio && haveVideo) {
307            break;
308        }
309    }
310
311    /* Add the support for Dummy audio*/
312    if( !haveAudio ){
313        LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");
314
315        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
316                                              ((mPlayEndTimeMsec)*1000LL));
317        LOGV("PreviewPlayer: setDataSource_l Dummyauiosource created");
318        if(mAudioTrack != NULL) {
319            haveAudio = true;
320        }
321    }
322
323    if (!haveAudio && !haveVideo) {
324        return UNKNOWN_ERROR;
325    }
326
327    mExtractorFlags = extractor->flags();
328    return OK;
329}
330
331status_t PreviewPlayer::setDataSource_l_jpg() {
332    M4OSA_ERR err = M4NO_ERROR;
333    LOGV("PreviewPlayer: setDataSource_l_jpg started");
334
335    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
336                                          ((mPlayEndTimeMsec)*1000LL));
337    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
338    if(mAudioSource != NULL) {
339        setAudioSource(mAudioSource);
340    }
341    status_t error = mAudioSource->start();
342    if (error != OK) {
343        LOGV("Error starting dummy audio source");
344        mAudioSource.clear();
345        return err;
346    }
347
348    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000LL;
349
350    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
351                                            mDurationUs, mUri);
352    mReportedWidth = mVideoWidth;
353    mReportedHeight = mVideoHeight;
354
355    setVideoSource(mVideoSource);
356    status_t err1 = mVideoSource->start();
357    if (err1 != OK) {
358        mVideoSource.clear();
359        return err;
360    }
361
362    mIsVideoSourceJpg = true;
363    return OK;
364}
365
366void PreviewPlayer::reset() {
367    Mutex::Autolock autoLock(mLock);
368    reset_l();
369}
370
371void PreviewPlayer::reset_l() {
372
373    if (mFlags & PREPARING) {
374        mFlags |= PREPARE_CANCELLED;
375    }
376
377    while (mFlags & PREPARING) {
378        mPreparedCondition.wait(mLock);
379    }
380
381    cancelPlayerEvents();
382    mAudioTrack.clear();
383    mVideoTrack.clear();
384
385    // Shutdown audio first, so that the respone to the reset request
386    // appears to happen instantaneously as far as the user is concerned
387    // If we did this later, audio would continue playing while we
388    // shutdown the video-related resources and the player appear to
389    // not be as responsive to a reset request.
390    if (mAudioPlayer == NULL && mAudioSource != NULL) {
391        // If we had an audio player, it would have effectively
392        // taken possession of the audio source and stopped it when
393        // _it_ is stopped. Otherwise this is still our responsibility.
394        mAudioSource->stop();
395    }
396    mAudioSource.clear();
397
398    mTimeSource = NULL;
399
400    //Single audio player instance used
401    //So donot delete it here
402    //It is deleted from PreviewController class
403    //delete mAudioPlayer;
404    mAudioPlayer = NULL;
405
406    if (mLastVideoBuffer) {
407        mLastVideoBuffer->release();
408        mLastVideoBuffer = NULL;
409    }
410
411    if (mVideoBuffer) {
412        mVideoBuffer->release();
413        mVideoBuffer = NULL;
414    }
415
416    if (mVideoSource != NULL) {
417        mVideoSource->stop();
418
419        // The following hack is necessary to ensure that the OMX
420        // component is completely released by the time we may try
421        // to instantiate it again.
422        wp<MediaSource> tmp = mVideoSource;
423        mVideoSource.clear();
424        while (tmp.promote() != NULL) {
425            usleep(1000);
426        }
427        IPCThreadState::self()->flushCommands();
428    }
429
430    mDurationUs = -1;
431    mFlags = 0;
432    mExtractorFlags = 0;
433    mVideoWidth = mVideoHeight = -1;
434    mTimeSourceDeltaUs = 0;
435    mVideoTimeUs = 0;
436
437    mSeeking = NO_SEEK;
438    mSeekNotificationSent = false;
439    mSeekTimeUs = 0;
440
441    mUri.setTo("");
442    mUriHeaders.clear();
443
444    mFileSource.clear();
445
446    delete mSuspensionState;
447    mSuspensionState = NULL;
448
449    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
450    mIsVideoSourceJpg = false;
451    mFrameRGBBuffer = NULL;
452    if(mFrameYUVBuffer != NULL) {
453        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
454        mFrameYUVBuffer = NULL;
455    }
456}
457
458status_t PreviewPlayer::play() {
459    Mutex::Autolock autoLock(mLock);
460
461    mFlags &= ~CACHE_UNDERRUN;
462
463    return play_l();
464}
465
466status_t PreviewPlayer::startAudioPlayer_l() {
467    CHECK(!(mFlags & AUDIO_RUNNING));
468
469    if (mAudioSource == NULL || mAudioPlayer == NULL) {
470        return OK;
471    }
472
473    if (!(mFlags & AUDIOPLAYER_STARTED)) {
474        mFlags |= AUDIOPLAYER_STARTED;
475
476        // We've already started the MediaSource in order to enable
477        // the prefetcher to read its data.
478        status_t err = mVeAudioPlayer->start(
479                true /* sourceAlreadyStarted */);
480
481        if (err != OK) {
482            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
483            return err;
484        }
485    } else {
486        mVeAudioPlayer->resume();
487    }
488
489    mFlags |= AUDIO_RUNNING;
490
491    mWatchForAudioEOS = true;
492
493    return OK;
494}
495
496status_t PreviewPlayer::setAudioPlayer(AudioPlayer *audioPlayer) {
497    Mutex::Autolock autoLock(mLock);
498    CHECK(!(mFlags & PLAYING));
499    mAudioPlayer = audioPlayer;
500
501    LOGV("SetAudioPlayer");
502    mIsChangeSourceRequired = true;
503    mVeAudioPlayer =
504            (VideoEditorAudioPlayer*)mAudioPlayer;
505
506    // check if the new and old source are dummy
507    sp<MediaSource> anAudioSource = mVeAudioPlayer->getSource();
508    if (anAudioSource == NULL) {
509        // Audio player does not have any source set.
510        LOGV("setAudioPlayer: Audio player does not have any source set");
511        return OK;
512    }
513
514    // If new video source is not dummy, then always change source
515    // Else audio player continues using old audio source and there are
516    // frame drops to maintain AV sync
517    sp<MetaData> meta;
518    if (mVideoSource != NULL) {
519        meta = mVideoSource->getFormat();
520        const char *pVidSrcType;
521        if (meta->findCString(kKeyDecoderComponent, &pVidSrcType)) {
522            if (strcmp(pVidSrcType, "DummyVideoSource") != 0) {
523                LOGV(" Video clip with silent audio; need to change source");
524                return OK;
525            }
526        }
527    }
528
529    const char *pSrcType1;
530    const char *pSrcType2;
531    meta = anAudioSource->getFormat();
532
533    if (meta->findCString(kKeyDecoderComponent, &pSrcType1)) {
534        if (strcmp(pSrcType1, "DummyAudioSource") == 0) {
535            meta = mAudioSource->getFormat();
536            if (meta->findCString(kKeyDecoderComponent, &pSrcType2)) {
537                if (strcmp(pSrcType2, "DummyAudioSource") == 0) {
538                    mIsChangeSourceRequired = false;
539                    // Just set the new play duration for the existing source
540                    MediaSource *pMediaSrc = anAudioSource.get();
541                    DummyAudioSource *pDummyAudioSource = (DummyAudioSource*)pMediaSrc;
542                    //Increment the duration of audio source
543                    pDummyAudioSource->setDuration(
544                        (int64_t)((mPlayEndTimeMsec)*1000LL));
545
546                    // Stop the new audio source
547                    // since we continue using old source
548                    LOGV("setAudioPlayer: stop new audio source");
549                    mAudioSource->stop();
550                }
551            }
552        }
553    }
554
555    return OK;
556}
557
558void PreviewPlayer::onStreamDone() {
559    // Posted whenever any stream finishes playing.
560
561    Mutex::Autolock autoLock(mLock);
562    if (!mStreamDoneEventPending) {
563        return;
564    }
565    mStreamDoneEventPending = false;
566
567    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
568        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
569
570        notifyListener_l(
571                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
572
573        pause_l(true /* at eos */);
574
575        mFlags |= AT_EOS;
576        return;
577    }
578
579    const bool allDone =
580        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
581            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));
582
583    if (!allDone) {
584        return;
585    }
586
587    if (mFlags & (LOOPING | AUTO_LOOPING)) {
588        seekTo_l(0);
589
590        if (mVideoSource != NULL) {
591            postVideoEvent_l();
592        }
593    } else {
594        LOGV("MEDIA_PLAYBACK_COMPLETE");
595        //pause before sending event
596        pause_l(true /* at eos */);
597
598        //This lock is used to syncronize onStreamDone() in PreviewPlayer and
599        //stopPreview() in PreviewController
600        Mutex::Autolock autoLock(mLockControl);
601        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);
602
603        mFlags |= AT_EOS;
604        LOGV("onStreamDone end");
605        return;
606    }
607}
608
609
610status_t PreviewPlayer::play_l() {
611
612    mFlags &= ~SEEK_PREVIEW;
613
614    if (mFlags & PLAYING) {
615        return OK;
616    }
617    mStartNextPlayer = false;
618
619    if (!(mFlags & PREPARED)) {
620        status_t err = prepare_l();
621
622        if (err != OK) {
623            return err;
624        }
625    }
626
627    mFlags |= PLAYING;
628    mFlags |= FIRST_FRAME;
629
630    bool deferredAudioSeek = false;
631
632    if (mAudioSource != NULL) {
633        if (mAudioPlayer == NULL) {
634            if (mAudioSink != NULL) {
635
636                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
637                mVeAudioPlayer =
638                          (VideoEditorAudioPlayer*)mAudioPlayer;
639
640                mAudioPlayer->setSource(mAudioSource);
641
642                mVeAudioPlayer->setAudioMixSettings(
643                 mPreviewPlayerAudioMixSettings);
644
645                mVeAudioPlayer->setAudioMixPCMFileHandle(
646                 mAudioMixPCMFileHandle);
647
648                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
649                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
650                 mCurrentMediaVolumeValue);
651
652                 mFlags |= AUDIOPLAYER_STARTED;
653                // We've already started the MediaSource in order to enable
654                // the prefetcher to read its data.
655                status_t err = mVeAudioPlayer->start(
656                        true /* sourceAlreadyStarted */);
657
658                if (err != OK) {
659                    //delete mAudioPlayer;
660                    mAudioPlayer = NULL;
661
662                    mFlags &= ~(PLAYING | FIRST_FRAME);
663                    return err;
664                }
665
666                mTimeSource = mVeAudioPlayer;
667                mFlags |= AUDIO_RUNNING;
668                deferredAudioSeek = true;
669                mWatchForAudioSeekComplete = false;
670                mWatchForAudioEOS = true;
671            }
672        } else {
673            mVeAudioPlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
674            bool isAudioPlayerStarted = mVeAudioPlayer->isStarted();
675
676            if (mIsChangeSourceRequired == true) {
677                LOGV("play_l: Change audio source required");
678
679                if (isAudioPlayerStarted == true) {
680                    mVeAudioPlayer->pause();
681                }
682
683                mVeAudioPlayer->setSource(mAudioSource);
684                mVeAudioPlayer->setObserver(this);
685
686                mVeAudioPlayer->setAudioMixSettings(
687                 mPreviewPlayerAudioMixSettings);
688
689                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
690                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
691                    mCurrentMediaVolumeValue);
692
693                if (isAudioPlayerStarted == true) {
694                    mVeAudioPlayer->resume();
695                } else {
696                    status_t err = OK;
697                    err = mVeAudioPlayer->start(true);
698                    if (err != OK) {
699                        mAudioPlayer = NULL;
700                        mVeAudioPlayer = NULL;
701
702                        mFlags &= ~(PLAYING | FIRST_FRAME);
703                        return err;
704                    }
705                }
706            } else {
707                LOGV("play_l: No Source change required");
708                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
709                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
710                    mCurrentMediaVolumeValue);
711
712                mVeAudioPlayer->resume();
713            }
714
715            mFlags |= AUDIOPLAYER_STARTED;
716            mFlags |= AUDIO_RUNNING;
717            mTimeSource = mVeAudioPlayer;
718            deferredAudioSeek = true;
719            mWatchForAudioSeekComplete = false;
720            mWatchForAudioEOS = true;
721        }
722    }
723
724    if (mTimeSource == NULL && mAudioPlayer == NULL) {
725        mTimeSource = &mSystemTimeSource;
726    }
727
728    // Set the seek option for Image source files and read.
729    // This resets the timestamping for image play
730    if (mIsVideoSourceJpg) {
731        MediaSource::ReadOptions options;
732        MediaBuffer *aLocalBuffer;
733        options.setSeekTo(mSeekTimeUs);
734        mVideoSource->read(&aLocalBuffer, &options);
735        aLocalBuffer->release();
736    }
737
738    if (mVideoSource != NULL) {
739        // Kick off video playback
740        postVideoEvent_l();
741    }
742
743    if (deferredAudioSeek) {
744        // If there was a seek request while we were paused
745        // and we're just starting up again, honor the request now.
746        seekAudioIfNecessary_l();
747    }
748
749    if (mFlags & AT_EOS) {
750        // Legacy behaviour, if a stream finishes playing and then
751        // is started again, we play from the start...
752        seekTo_l(0);
753    }
754
755    return OK;
756}
757
758
759status_t PreviewPlayer::initRenderer_l() {
760    if (mSurface != NULL || mISurface != NULL) {
761        sp<MetaData> meta = mVideoSource->getFormat();
762
763        int32_t format;
764        const char *component;
765        int32_t decodedWidth, decodedHeight;
766        CHECK(meta->findInt32(kKeyColorFormat, &format));
767        CHECK(meta->findCString(kKeyDecoderComponent, &component));
768        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
769        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
770
771        // Must ensure that mVideoRenderer's destructor is actually executed
772        // before creating a new one.
773        IPCThreadState::self()->flushCommands();
774
775        // always use localrenderer since decoded buffers are modified
776        // by postprocessing module
777        // Other decoders are instantiated locally and as a consequence
778        // allocate their buffers in local address space.
779        if(mVideoRenderer == NULL) {
780
781            mVideoRenderer = PreviewLocalRenderer:: initPreviewLocalRenderer (
782                false,  // previewOnly
783                (OMX_COLOR_FORMATTYPE)format,
784                mSurface,
785                mOutputVideoWidth, mOutputVideoHeight,
786                mOutputVideoWidth, mOutputVideoHeight);
787
788            if ( mVideoRenderer == NULL )
789            {
790                return UNKNOWN_ERROR;
791            }
792            return OK;
793        }
794    }
795    return OK;
796}
797
798
799void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
800    Mutex::Autolock autoLock(mLock);
801    mISurface = isurface;
802}
803
804
805status_t PreviewPlayer::seekTo(int64_t timeUs) {
806
807    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
808        Mutex::Autolock autoLock(mLock);
809        return seekTo_l(timeUs);
810    }
811
812    return OK;
813}
814
815
816status_t PreviewPlayer::getVideoDimensions(
817        int32_t *width, int32_t *height) const {
818    Mutex::Autolock autoLock(mLock);
819
820    if (mVideoWidth < 0 || mVideoHeight < 0) {
821        return UNKNOWN_ERROR;
822    }
823
824    *width = mVideoWidth;
825    *height = mVideoHeight;
826
827    return OK;
828}
829
830
831status_t PreviewPlayer::initAudioDecoder() {
832    sp<MetaData> meta = mAudioTrack->getFormat();
833    const char *mime;
834    CHECK(meta->findCString(kKeyMIMEType, &mime));
835
836    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
837        mAudioSource = mAudioTrack;
838    } else {
839        sp<MediaSource> aRawSource;
840        aRawSource = OMXCodec::Create(
841                mClient.interface(), mAudioTrack->getFormat(),
842                false, // createEncoder
843                mAudioTrack);
844
845        if(aRawSource != NULL) {
846            LOGV("initAudioDecoder: new VideoEditorSRC");
847            mAudioSource = new VideoEditorSRC(aRawSource);
848        }
849    }
850
851    if (mAudioSource != NULL) {
852        int64_t durationUs;
853        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
854            Mutex::Autolock autoLock(mMiscStateLock);
855            if (mDurationUs < 0 || durationUs > mDurationUs) {
856                mDurationUs = durationUs;
857            }
858        }
859        status_t err = mAudioSource->start();
860
861        if (err != OK) {
862            mAudioSource.clear();
863            return err;
864        }
865    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
866        // For legacy reasons we're simply going to ignore the absence
867        // of an audio decoder for QCELP instead of aborting playback
868        // altogether.
869        return OK;
870    }
871
872    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
873}
874
875
876status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
877
878    mVideoSource = OMXCodec::Create(
879            mClient.interface(), mVideoTrack->getFormat(),
880            false,
881            mVideoTrack,
882            NULL, flags);
883
884    if (mVideoSource != NULL) {
885        int64_t durationUs;
886        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
887            Mutex::Autolock autoLock(mMiscStateLock);
888            if (mDurationUs < 0 || durationUs > mDurationUs) {
889                mDurationUs = durationUs;
890            }
891        }
892
893        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
894        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
895
896        mReportedWidth = mVideoWidth;
897        mReportedHeight = mVideoHeight;
898
899        status_t err = mVideoSource->start();
900
901        if (err != OK) {
902            mVideoSource.clear();
903            return err;
904        }
905    }
906
907    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
908}
909
910
911void PreviewPlayer::onVideoEvent() {
912    uint32_t i=0;
913    bool bAppliedVideoEffect = false;
914    M4OSA_ERR err1 = M4NO_ERROR;
915    int64_t imageFrameTimeUs = 0;
916
917    Mutex::Autolock autoLock(mLock);
918    if (!mVideoEventPending) {
919        // The event has been cancelled in reset_l() but had already
920        // been scheduled for execution at that time.
921        return;
922    }
923    mVideoEventPending = false;
924
925    if (mFlags & SEEK_PREVIEW) {
926        mFlags &= ~SEEK_PREVIEW;
927        return;
928    }
929
930    TimeSource *ts_st =  &mSystemTimeSource;
931    int64_t timeStartUs = ts_st->getRealTimeUs();
932
933    if (mSeeking != NO_SEEK) {
934        if (mLastVideoBuffer) {
935            mLastVideoBuffer->release();
936            mLastVideoBuffer = NULL;
937        }
938
939
940        if(mAudioSource != NULL) {
941
942            // We're going to seek the video source first, followed by
943            // the audio source.
944            // In order to avoid jumps in the DataSource offset caused by
945            // the audio codec prefetching data from the old locations
946            // while the video codec is already reading data from the new
947            // locations, we'll "pause" the audio source, causing it to
948            // stop reading input data until a subsequent seek.
949
950            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
951                mAudioPlayer->pause();
952                mFlags &= ~AUDIO_RUNNING;
953            }
954            mAudioSource->pause();
955        }
956    }
957
958    if (!mVideoBuffer) {
959        MediaSource::ReadOptions options;
960        if (mSeeking != NO_SEEK) {
961            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
962                                                      mSeekTimeUs / 1E6);
963
964            options.setSeekTo(
965                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
966        }
967        for (;;) {
968            status_t err = mVideoSource->read(&mVideoBuffer, &options);
969            options.clearSeekTo();
970
971            if (err != OK) {
972                CHECK_EQ(mVideoBuffer, NULL);
973
974                if (err == INFO_FORMAT_CHANGED) {
975                    LOGV("LV PLAYER VideoSource signalled format change");
976                    notifyVideoSize_l();
977                    sp<MetaData> meta = mVideoSource->getFormat();
978
979                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
980                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
981                    if (mVideoRenderer != NULL) {
982                        mVideoRendererIsPreview = false;
983                        err = initRenderer_l();
984                        if (err != OK) {
985                            postStreamDoneEvent_l(err);
986                        }
987
988                    }
989                    continue;
990                }
991                // So video playback is complete, but we may still have
992                // a seek request pending that needs to be applied to the audio track
993                if (mSeeking != NO_SEEK) {
994                    LOGV("video stream ended while seeking!");
995                }
996                finishSeekIfNecessary(-1);
997                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
998                mFlags |= VIDEO_AT_EOS;
999                mFlags |= AUDIO_AT_EOS;
1000                mOverlayUpdateEventPosted = false;
1001                postStreamDoneEvent_l(err);
1002                // Set the last decoded timestamp to duration
1003                mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
1004                return;
1005            }
1006
1007            if (mVideoBuffer->range_length() == 0) {
1008                // Some decoders, notably the PV AVC software decoder
1009                // return spurious empty buffers that we just want to ignore.
1010
1011                mVideoBuffer->release();
1012                mVideoBuffer = NULL;
1013                continue;
1014            }
1015
1016            int64_t videoTimeUs;
1017            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
1018
1019            if (mSeeking != NO_SEEK) {
1020                if (videoTimeUs < mSeekTimeUs) {
1021                    // buffers are before seek time
1022                    // ignore them
1023                    mVideoBuffer->release();
1024                    mVideoBuffer = NULL;
1025                    continue;
1026                }
1027            } else {
1028                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
1029                    // Frames are before begin cut time
1030                    // Donot render
1031                    mVideoBuffer->release();
1032                    mVideoBuffer = NULL;
1033                    continue;
1034                }
1035            }
1036            break;
1037        }
1038    }
1039
1040    mNumberDecVideoFrames++;
1041
1042    int64_t timeUs;
1043    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
1044
1045    {
1046        Mutex::Autolock autoLock(mMiscStateLock);
1047        mVideoTimeUs = timeUs;
1048    }
1049
1050
1051    if(!mStartNextPlayer) {
1052        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000LL) - timeUs;
1053        if(playbackTimeRemaining <= 1500000) {
1054            //When less than 1.5 sec of playback left
1055            // send notification to start next player
1056
1057            mStartNextPlayer = true;
1058            notifyListener_l(0xAAAAAAAA);
1059        }
1060    }
1061
1062    SeekType wasSeeking = mSeeking;
1063    finishSeekIfNecessary(timeUs);
1064    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
1065        status_t err = startAudioPlayer_l();
1066        if (err != OK) {
1067            LOGE("Starting the audio player failed w/ err %d", err);
1068            return;
1069        }
1070    }
1071
1072    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
1073
1074    if(ts == NULL) {
1075        mVideoBuffer->release();
1076        mVideoBuffer = NULL;
1077        return;
1078    }
1079
1080    if(!mIsVideoSourceJpg) {
1081        if (mFlags & FIRST_FRAME) {
1082            mFlags &= ~FIRST_FRAME;
1083
1084            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
1085        }
1086
1087        int64_t realTimeUs, mediaTimeUs;
1088        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
1089            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
1090            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
1091        }
1092
1093        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
1094
1095        int64_t latenessUs = nowUs - timeUs;
1096
1097        if (wasSeeking != NO_SEEK) {
1098            // Let's display the first frame after seeking right away.
1099            latenessUs = 0;
1100        }
1101        LOGV("Audio time stamp = %lld and video time stamp = %lld",
1102                                            ts->getRealTimeUs(),timeUs);
1103        if (latenessUs > 40000) {
1104            // We're more than 40ms late.
1105
1106            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
1107                                           latenessUs, latenessUs / 1E6);
1108
1109            mVideoBuffer->release();
1110            mVideoBuffer = NULL;
1111            postVideoEvent_l(0);
1112            return;
1113        }
1114
1115        if (latenessUs < -25000) {
1116            // We're more than 25ms early.
1117            LOGV("We're more than 25ms early, lateness %lld", latenessUs);
1118
1119            postVideoEvent_l(25000);
1120            return;
1121        }
1122    }
1123
1124    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
1125        mVideoRendererIsPreview = false;
1126
1127        status_t err = initRenderer_l();
1128        if (err != OK) {
1129            postStreamDoneEvent_l(err);
1130        }
1131    }
1132
1133    // If timestamp exceeds endCutTime of clip, donot render
1134    if((timeUs/1000) > mPlayEndTimeMsec) {
1135        if (mLastVideoBuffer) {
1136            mLastVideoBuffer->release();
1137            mLastVideoBuffer = NULL;
1138        }
1139        mLastVideoBuffer = mVideoBuffer;
1140        mVideoBuffer = NULL;
1141        mFlags |= VIDEO_AT_EOS;
1142        mFlags |= AUDIO_AT_EOS;
1143        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
1144        mOverlayUpdateEventPosted = false;
1145        // Set the last decoded timestamp to duration
1146        mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
1147        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
1148        return;
1149    }
1150    // Capture the frame timestamp to be rendered
1151    mDecodedVideoTs = timeUs;
1152
1153    // Post processing to apply video effects
1154    for(i=0;i<mNumberEffects;i++) {
1155        // First check if effect starttime matches the clip being previewed
1156        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
1157        (mEffectsSettings[i].uiStartTime >=
1158         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
1159        {
1160            // This effect doesn't belong to this clip, check next one
1161            continue;
1162        }
1163        // Check if effect applies to this particular frame timestamp
1164        if((mEffectsSettings[i].uiStartTime <=
1165         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
1166            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
1167             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
1168              && (mEffectsSettings[i].uiDuration != 0)) {
1169            setVideoPostProcessingNode(
1170             mEffectsSettings[i].VideoEffectType, TRUE);
1171        }
1172        else {
1173            setVideoPostProcessingNode(
1174             mEffectsSettings[i].VideoEffectType, FALSE);
1175        }
1176    }
1177
1178    //Provide the overlay Update indication when there is an overlay effect
1179    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
1180        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
1181        if (!mOverlayUpdateEventPosted) {
1182            // Find the effect in effectSettings array
1183            M4OSA_UInt32 index;
1184            for (index = 0; index < mNumberEffects; index++) {
1185                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
1186                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
1187                if(mEffectsSettings[index].VideoEffectType ==
1188                    (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
1189                    if (((mEffectsSettings[index].uiStartTime + 1) <=
1190                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
1191                        ((mEffectsSettings[index].uiStartTime - 1 +
1192                        mEffectsSettings[index].uiDuration) >=
1193                        timeMs + timeOffset - mPlayBeginTimeMsec))
1194                    {
1195                        break;
1196                    }
1197                }
1198            }
1199            if (index < mNumberEffects) {
1200                mCurrFramingEffectIndex = index;
1201                mOverlayUpdateEventPosted = true;
1202                postOverlayUpdateEvent_l();
1203                LOGV("Framing index = %d", mCurrFramingEffectIndex);
1204            } else {
1205                LOGV("No framing effects found");
1206            }
1207        }
1208
1209    } else if (mOverlayUpdateEventPosted) {
1210        //Post the event when the overlay is no more valid
1211        LOGV("Overlay is Done");
1212        mOverlayUpdateEventPosted = false;
1213        postOverlayUpdateEvent_l();
1214    }
1215
1216
1217    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
1218        err1 = doVideoPostProcessing();
1219        if(err1 != M4NO_ERROR) {
1220            LOGE("doVideoPostProcessing returned err");
1221            bAppliedVideoEffect = false;
1222        }
1223        else {
1224            bAppliedVideoEffect = true;
1225        }
1226    }
1227    else {
1228        bAppliedVideoEffect = false;
1229        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
1230            // No effects to be applied, but media rendering to be done
1231            err1 = doMediaRendering();
1232            if(err1 != M4NO_ERROR) {
1233                LOGE("doMediaRendering returned err");
1234                //Use original mVideoBuffer for rendering
1235                mVideoResizedOrCropped = false;
1236            }
1237        }
1238    }
1239
1240    if (mVideoRenderer != NULL) {
1241        LOGV("mVideoRenderer CALL render()");
1242        mVideoRenderer->render();
1243    }
1244
1245    if (mLastVideoBuffer) {
1246        mLastVideoBuffer->release();
1247        mLastVideoBuffer = NULL;
1248    }
1249
1250    mLastVideoBuffer = mVideoBuffer;
1251    mVideoBuffer = NULL;
1252
1253    // Post progress callback based on callback interval set
1254    if(mNumberDecVideoFrames >= mProgressCbInterval) {
1255        postProgressCallbackEvent_l();
1256        mNumberDecVideoFrames = 0;  // reset counter
1257    }
1258
1259    // if reached EndCutTime of clip, post EOS event
1260    if((timeUs/1000) >= mPlayEndTimeMsec) {
1261        LOGV("PreviewPlayer: onVideoEvent EOS.");
1262        mFlags |= VIDEO_AT_EOS;
1263        mFlags |= AUDIO_AT_EOS;
1264        mOverlayUpdateEventPosted = false;
1265        // Set the last decoded timestamp to duration
1266        mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
1267        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
1268    }
1269    else {
1270        if ((wasSeeking != NO_SEEK) && (mFlags & SEEK_PREVIEW)) {
1271            mFlags &= ~SEEK_PREVIEW;
1272            return;
1273        }
1274
1275        if(!mIsVideoSourceJpg) {
1276            postVideoEvent_l(0);
1277        }
1278        else {
1279            postVideoEvent_l(33000);
1280        }
1281    }
1282}
1283
1284status_t PreviewPlayer::prepare() {
1285    Mutex::Autolock autoLock(mLock);
1286    return prepare_l();
1287}
1288
1289status_t PreviewPlayer::prepare_l() {
1290    if (mFlags & PREPARED) {
1291        return OK;
1292    }
1293
1294    if (mFlags & PREPARING) {
1295        return UNKNOWN_ERROR;
1296    }
1297
1298    mIsAsyncPrepare = false;
1299    status_t err = prepareAsync_l();
1300
1301    if (err != OK) {
1302        return err;
1303    }
1304
1305    while (mFlags & PREPARING) {
1306        mPreparedCondition.wait(mLock);
1307    }
1308
1309    return mPrepareResult;
1310}
1311
1312status_t PreviewPlayer::prepareAsync_l() {
1313    if (mFlags & PREPARING) {
1314        return UNKNOWN_ERROR;  // async prepare already pending
1315    }
1316
1317    if (!mQueueStarted) {
1318        mQueue.start();
1319        mQueueStarted = true;
1320    }
1321
1322    mFlags |= PREPARING;
1323    mAsyncPrepareEvent = new PreviewPlayerEvent(
1324            this, &PreviewPlayer::onPrepareAsyncEvent);
1325
1326    mQueue.postEvent(mAsyncPrepareEvent);
1327
1328    return OK;
1329}
1330
1331status_t PreviewPlayer::finishSetDataSource_l() {
1332    sp<DataSource> dataSource;
1333    sp<MediaExtractor> extractor;
1334
1335    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
1336
1337    if (dataSource == NULL) {
1338        return UNKNOWN_ERROR;
1339    }
1340
1341    //If file type is .rgb, then no need to check for Extractor
1342    int uriLen = strlen(mUri);
1343    int startOffset = uriLen - 4;
1344    if(!strncasecmp(mUri+startOffset, ".rgb", 4)) {
1345        extractor = NULL;
1346    }
1347    else {
1348        extractor = MediaExtractor::Create(dataSource,
1349                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
1350    }
1351
1352    if (extractor == NULL) {
1353        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
1354        return setDataSource_l_jpg();
1355    }
1356
1357    return setDataSource_l(extractor);
1358}
1359
1360
1361// static
1362bool PreviewPlayer::ContinuePreparation(void *cookie) {
1363    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
1364
1365    return (me->mFlags & PREPARE_CANCELLED) == 0;
1366}
1367
1368void PreviewPlayer::onPrepareAsyncEvent() {
1369    Mutex::Autolock autoLock(mLock);
1370    LOGV("onPrepareAsyncEvent");
1371
1372    if (mFlags & PREPARE_CANCELLED) {
1373        LOGV("LV PLAYER prepare was cancelled before doing anything");
1374        abortPrepare(UNKNOWN_ERROR);
1375        return;
1376    }
1377
1378    if (mUri.size() > 0) {
1379        status_t err = finishSetDataSource_l();
1380
1381        if (err != OK) {
1382            abortPrepare(err);
1383            return;
1384        }
1385    }
1386
1387    if (mVideoTrack != NULL && mVideoSource == NULL) {
1388        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
1389
1390        if (err != OK) {
1391            abortPrepare(err);
1392            return;
1393        }
1394    }
1395
1396    if (mAudioTrack != NULL && mAudioSource == NULL) {
1397        status_t err = initAudioDecoder();
1398
1399        if (err != OK) {
1400            abortPrepare(err);
1401            return;
1402        }
1403    }
1404    finishAsyncPrepare_l();
1405
1406}
1407
1408void PreviewPlayer::finishAsyncPrepare_l() {
1409    if (mIsAsyncPrepare) {
1410        if (mVideoSource == NULL) {
1411            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
1412            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
1413        } else {
1414            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
1415            notifyVideoSize_l();
1416        }
1417        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
1418        notifyListener_l(MEDIA_PREPARED);
1419    }
1420
1421    mPrepareResult = OK;
1422    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
1423    mFlags |= PREPARED;
1424    mAsyncPrepareEvent = NULL;
1425    mPreparedCondition.broadcast();
1426}
1427
1428status_t PreviewPlayer::suspend() {
1429    LOGV("suspend");
1430    Mutex::Autolock autoLock(mLock);
1431
1432    if (mSuspensionState != NULL) {
1433        if (mLastVideoBuffer == NULL) {
1434            //go into here if video is suspended again
1435            //after resuming without being played between
1436            //them
1437            SuspensionState *state = mSuspensionState;
1438            mSuspensionState = NULL;
1439            reset_l();
1440            mSuspensionState = state;
1441            return OK;
1442        }
1443
1444        delete mSuspensionState;
1445        mSuspensionState = NULL;
1446    }
1447
1448    if (mFlags & PREPARING) {
1449        mFlags |= PREPARE_CANCELLED;
1450    }
1451
1452    while (mFlags & PREPARING) {
1453        mPreparedCondition.wait(mLock);
1454    }
1455
1456    SuspensionState *state = new SuspensionState;
1457    state->mUri = mUri;
1458    state->mUriHeaders = mUriHeaders;
1459    state->mFileSource = mFileSource;
1460
1461    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
1462    getPosition(&state->mPositionUs);
1463
1464    if (mLastVideoBuffer) {
1465        size_t size = mLastVideoBuffer->range_length();
1466        if (size) {
1467            int32_t unreadable;
1468            if (!mLastVideoBuffer->meta_data()->findInt32(
1469                        kKeyIsUnreadable, &unreadable)
1470                    || unreadable == 0) {
1471                state->mLastVideoFrameSize = size;
1472                state->mLastVideoFrame = malloc(size);
1473                memcpy(state->mLastVideoFrame,
1474                   (const uint8_t *)mLastVideoBuffer->data()
1475                        + mLastVideoBuffer->range_offset(),
1476                   size);
1477
1478                state->mVideoWidth = mVideoWidth;
1479                state->mVideoHeight = mVideoHeight;
1480
1481                sp<MetaData> meta = mVideoSource->getFormat();
1482                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
1483                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
1484                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
1485            } else {
1486                LOGV("Unable to save last video frame, we have no access to "
1487                     "the decoded video data.");
1488            }
1489        }
1490    }
1491
1492    reset_l();
1493
1494    mSuspensionState = state;
1495
1496    return OK;
1497}
1498
1499void PreviewPlayer::acquireLock() {
1500    LOGV("acquireLock");
1501    mLockControl.lock();
1502}
1503
1504void PreviewPlayer::releaseLock() {
1505    LOGV("releaseLock");
1506    mLockControl.unlock();
1507}
1508
1509status_t PreviewPlayer::resume() {
1510    LOGV("resume");
1511    Mutex::Autolock autoLock(mLock);
1512
1513    if (mSuspensionState == NULL) {
1514        return INVALID_OPERATION;
1515    }
1516
1517    SuspensionState *state = mSuspensionState;
1518    mSuspensionState = NULL;
1519
1520    status_t err;
1521    if (state->mFileSource != NULL) {
1522        err = AwesomePlayer::setDataSource_l(state->mFileSource);
1523
1524        if (err == OK) {
1525            mFileSource = state->mFileSource;
1526        }
1527    } else {
1528        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
1529    }
1530
1531    if (err != OK) {
1532        delete state;
1533        state = NULL;
1534
1535        return err;
1536    }
1537
1538    seekTo_l(state->mPositionUs);
1539
1540    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
1541
1542    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
1543        mVideoRenderer =
1544            PreviewLocalRenderer::initPreviewLocalRenderer(
1545                    true,  // previewOnly
1546                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
1547                    mSurface,
1548                    state->mVideoWidth,
1549                    state->mVideoHeight,
1550                    state->mDecodedWidth,
1551                    state->mDecodedHeight);
1552
1553        mVideoRendererIsPreview = true;
1554
1555        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
1556                state->mLastVideoFrame, state->mLastVideoFrameSize);
1557    }
1558
1559    if (state->mFlags & PLAYING) {
1560        play_l();
1561    }
1562
1563    mSuspensionState = state;
1564    state = NULL;
1565
1566    return OK;
1567}
1568
1569
1570status_t PreviewPlayer::loadEffectsSettings(
1571                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
1572    M4OSA_UInt32 i = 0, rgbSize = 0;
1573    M4VIFI_UInt8 *tmp = M4OSA_NULL;
1574
1575    mNumberEffects = nEffects;
1576    mEffectsSettings = pEffectSettings;
1577    return OK;
1578}
1579
1580status_t PreviewPlayer::loadAudioMixSettings(
1581                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
1582
1583    LOGV("PreviewPlayer: loadAudioMixSettings: ");
1584    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
1585    return OK;
1586}
1587
1588status_t PreviewPlayer::setAudioMixPCMFileHandle(
1589                    M4OSA_Context pAudioMixPCMFileHandle) {
1590
1591    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
1592    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
1593    return OK;
1594}
1595
1596status_t PreviewPlayer::setAudioMixStoryBoardParam(
1597                    M4OSA_UInt32 audioMixStoryBoardTS,
1598                    M4OSA_UInt32 currentMediaBeginCutTime,
1599                    M4OSA_UInt32 primaryTrackVolValue ) {
1600
1601    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
1602    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
1603    mCurrentMediaVolumeValue = primaryTrackVolValue;
1604    return OK;
1605}
1606
1607status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
1608
1609    mPlayBeginTimeMsec = msec;
1610    return OK;
1611}
1612
1613status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
1614
1615    mPlayEndTimeMsec = msec;
1616    return OK;
1617}
1618
1619status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
1620
1621    mStoryboardStartTimeMsec = msec;
1622    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000LL;
1623    return OK;
1624}
1625
1626status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
1627
1628    mProgressCbInterval = cbInterval;
1629    return OK;
1630}
1631
1632
1633status_t PreviewPlayer::setMediaRenderingMode(
1634        M4xVSS_MediaRendering mode,
1635        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
1636
1637    mRenderingMode = mode;
1638
1639    /* reset boolean for each clip*/
1640    mVideoResizedOrCropped = false;
1641
1642    switch(outputVideoSize) {
1643        case M4VIDEOEDITING_kSQCIF:
1644            mOutputVideoWidth = 128;
1645            mOutputVideoHeight = 96;
1646            break;
1647
1648        case M4VIDEOEDITING_kQQVGA:
1649            mOutputVideoWidth = 160;
1650            mOutputVideoHeight = 120;
1651            break;
1652
1653        case M4VIDEOEDITING_kQCIF:
1654            mOutputVideoWidth = 176;
1655            mOutputVideoHeight = 144;
1656            break;
1657
1658        case M4VIDEOEDITING_kQVGA:
1659            mOutputVideoWidth = 320;
1660            mOutputVideoHeight = 240;
1661            break;
1662
1663        case M4VIDEOEDITING_kCIF:
1664            mOutputVideoWidth = 352;
1665            mOutputVideoHeight = 288;
1666            break;
1667
1668        case M4VIDEOEDITING_kVGA:
1669            mOutputVideoWidth = 640;
1670            mOutputVideoHeight = 480;
1671            break;
1672
1673        case M4VIDEOEDITING_kWVGA:
1674            mOutputVideoWidth = 800;
1675            mOutputVideoHeight = 480;
1676            break;
1677
1678        case M4VIDEOEDITING_kNTSC:
1679            mOutputVideoWidth = 720;
1680            mOutputVideoHeight = 480;
1681            break;
1682
1683        case M4VIDEOEDITING_k640_360:
1684            mOutputVideoWidth = 640;
1685            mOutputVideoHeight = 360;
1686            break;
1687
1688        case M4VIDEOEDITING_k854_480:
1689            mOutputVideoWidth = 854;
1690            mOutputVideoHeight = 480;
1691            break;
1692
1693        case M4VIDEOEDITING_kHD1280:
1694            mOutputVideoWidth = 1280;
1695            mOutputVideoHeight = 720;
1696            break;
1697
1698        case M4VIDEOEDITING_kHD1080:
1699            mOutputVideoWidth = 1080;
1700            mOutputVideoHeight = 720;
1701            break;
1702
1703        case M4VIDEOEDITING_kHD960:
1704            mOutputVideoWidth = 960;
1705            mOutputVideoHeight = 720;
1706            break;
1707
1708        default:
1709            LOGE("unsupported output video size set");
1710            return BAD_VALUE;
1711    }
1712
1713    return OK;
1714}
1715
1716M4OSA_ERR PreviewPlayer::doMediaRendering() {
1717    M4OSA_ERR err = M4NO_ERROR;
1718    M4VIFI_ImagePlane planeIn[3], planeOut[3];
1719    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
1720    M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
1721    size_t videoBufferSize = 0;
1722    M4OSA_UInt32 frameSize = 0, i = 0, index = 0, nFrameCount = 0, bufferOffset = 0;
1723    int32_t colorFormat = 0;
1724
1725    if(!mIsVideoSourceJpg) {
1726        sp<MetaData> meta = mVideoSource->getFormat();
1727        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1728    }
1729    else {
1730        colorFormat = OMX_COLOR_FormatYUV420Planar;
1731    }
1732
1733    videoBufferSize = mVideoBuffer->size();
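    // A planar YUV420 frame holds width * height luma bytes plus two
    // quarter-size chroma planes, i.e. (3 * width * height) / 2 bytes in total
    // (for example, 640x480 -> 460800 bytes).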
1734    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
1735
1736    uint8_t* outBuffer;
1737    size_t outBufferStride = 0;
1738
1739    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
1740
1741    bufferOffset = index*frameSize;
1742    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data() +
1743               mVideoBuffer->range_offset() + bufferOffset;
1744
1745
1746    /* In plane*/
1747    prepareYUV420ImagePlane(planeIn, mVideoWidth,
1748      mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
1749
1750    // Set up the output YUV420 plane so that it is compatible with the YV12 format:
1751    //  - width and height must be even
1752    //  - the V plane precedes the U plane (YVU ordering instead of YUV)
1753    //  - buffers are aligned on 32 bits
1754
1755    // Round the output dimensions up to even values, as YV12 requires.
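    // ((x + 1) >> 1) << 1 rounds x up to the next even value,
    // e.g. 853 becomes 854 while 854 is left unchanged.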
1756    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
1757    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
1758
1759    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
1760     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
1761
1762
1763    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
1764
1765    if(err != M4NO_ERROR)
1766    {
1767        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", (int)err);
1768        return err;
1769    }
1770    mVideoResizedOrCropped = true;
1771
1772    return err;
1773}
1774
1775status_t PreviewPlayer::resetJniCallbackTimeStamp() {
1776
1777    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000LL;
1778    return OK;
1779}
1780
1781void PreviewPlayer::postProgressCallbackEvent_l() {
1782    if (mProgressCbEventPending) {
1783        return;
1784    }
1785    mProgressCbEventPending = true;
1786
1787    mQueue.postEvent(mProgressCbEvent);
1788}
1789
1790
1791void PreviewPlayer::onProgressCbEvent() {
1792    Mutex::Autolock autoLock(mLock);
1793    if (!mProgressCbEventPending) {
1794        return;
1795    }
1796    mProgressCbEventPending = false;
1797    // If playback starts from a previous I-frame, the decoded timestamp is still
1798    // before the begin-cut time; in that case report only the storyboard offset.
1799    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
1800        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
1801    }
1802    else {
1803        notifyListener_l(MEDIA_INFO, 0,
1804            (((mDecodedVideoTs + mDecVideoTsStoryBoard)/1000) - mPlayBeginTimeMsec));
1805    }
1806}
1807
1808void PreviewPlayer::postOverlayUpdateEvent_l() {
1809    if (mOverlayUpdateEventPending) {
1810        return;
1811    }
1812    mOverlayUpdateEventPending = true;
1813    mQueue.postEvent(mOverlayUpdateEvent);
1814}
1815
1816void PreviewPlayer::onUpdateOverlayEvent() {
1817    Mutex::Autolock autoLock(mLock);
1818
1819    if (!mOverlayUpdateEventPending) {
1820        return;
1821    }
1822    mOverlayUpdateEventPending = false;
1823
1824    int updateState;
1825    if (mOverlayUpdateEventPosted) {
1826        updateState = 1;
1827    } else {
1828        updateState = 0;
1829    }
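    // 0xBBBBBBBB is a private message code (outside the standard media event
    // range) used to report overlay updates for the framing effect identified
    // by mCurrFramingEffectIndex; updateState is 1 when an overlay update has
    // been posted and 0 otherwise.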
1830    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
1831}
1832
1833
1834void PreviewPlayer::setVideoPostProcessingNode(
1835                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
1836
1837    uint32_t effect = VIDEO_EFFECT_NONE;
1838
1839    // Map M4VSS3GPP_VideoEffectType to the local video-effect bitmask values
1840    switch(type) {
1841        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1842            effect = VIDEO_EFFECT_FADEFROMBLACK;
1843            break;
1844
1845        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1846            effect = VIDEO_EFFECT_FADETOBLACK;
1847            break;
1848
1849        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1850            effect = VIDEO_EFFECT_CURTAINOPEN;
1851            break;
1852
1853        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1854            effect = VIDEO_EFFECT_CURTAINCLOSE;
1855            break;
1856
1857        case M4xVSS_kVideoEffectType_BlackAndWhite:
1858            effect = VIDEO_EFFECT_BLACKANDWHITE;
1859            break;
1860
1861        case M4xVSS_kVideoEffectType_Pink:
1862            effect = VIDEO_EFFECT_PINK;
1863            break;
1864
1865        case M4xVSS_kVideoEffectType_Green:
1866            effect = VIDEO_EFFECT_GREEN;
1867            break;
1868
1869        case M4xVSS_kVideoEffectType_Sepia:
1870            effect = VIDEO_EFFECT_SEPIA;
1871            break;
1872
1873        case M4xVSS_kVideoEffectType_Negative:
1874            effect = VIDEO_EFFECT_NEGATIVE;
1875            break;
1876
1877        case M4xVSS_kVideoEffectType_Framing:
1878            effect = VIDEO_EFFECT_FRAMING;
1879            break;
1880
1881        case M4xVSS_kVideoEffectType_Fifties:
1882            effect = VIDEO_EFFECT_FIFTIES;
1883            break;
1884
1885        case M4xVSS_kVideoEffectType_ColorRGB16:
1886            effect = VIDEO_EFFECT_COLOR_RGB16;
1887            break;
1888
1889        case M4xVSS_kVideoEffectType_Gradient:
1890            effect = VIDEO_EFFECT_GRADIENT;
1891            break;
1892
1893        default:
1894            effect = VIDEO_EFFECT_NONE;
1895            break;
1896    }
1897
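    // mCurrentVideoEffect is a bitmask, so several effects may be active at
    // once. The FIFTIES effect additionally latches mIsFiftiesEffectStarted so
    // that the first post-processed frame knows the effect has just started.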
1898    if(enable == M4OSA_TRUE) {
1899        //If already set, then no need to set again
1900        if(!(mCurrentVideoEffect & effect)) {
1901            mCurrentVideoEffect |= effect;
1902            if(effect == VIDEO_EFFECT_FIFTIES) {
1903                mIsFiftiesEffectStarted = true;
1904            }
1905        }
1906    }
1907    else  {
1908        //Reset only if already set
1909        if(mCurrentVideoEffect & effect) {
1910            mCurrentVideoEffect &= ~effect;
1911        }
1912    }
1913}
1914
1915status_t PreviewPlayer::setImageClipProperties(uint32_t width, uint32_t height) {
1916    mVideoWidth = width;
1917    mVideoHeight = height;
1918    return OK;
1919}
1920
1921
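// Applies the currently enabled video effects and the rendering mode to the
// decoded frame, writing the result directly into the output buffer obtained
// from the renderer.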
1922M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
1923    M4OSA_ERR err = M4NO_ERROR;
1924    vePostProcessParams postProcessParams;
1925    int32_t colorFormat = 0;
1926
1927
1928    if(!mIsVideoSourceJpg) {
1929        sp<MetaData> meta = mVideoSource->getFormat();
1930        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1931    }
1932    else {
1933        colorFormat = OMX_COLOR_FormatYUV420Planar;
1934    }
1935
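    // The effect/rendering path only handles planar YUV420, so semi-planar
    // input is rejected. 0x7FA30C00 is assumed to be a vendor-specific
    // (Qualcomm tiled) YUV420 semi-planar OMX color format value.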
1936    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
1937       (colorFormat == 0x7FA30C00)) {
1938          LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
1939          return M4ERR_UNSUPPORTED_MEDIA_TYPE;
1940    }
1941
1942    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
1943        + mVideoBuffer->range_offset();
1944
1945    postProcessParams.videoWidth = mVideoWidth;
1946    postProcessParams.videoHeight = mVideoHeight;
1947    postProcessParams.timeMs = mDecodedVideoTs/1000;
1948    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
1949    postProcessParams.effectsSettings = mEffectsSettings;
1950    postProcessParams.numberEffects = mNumberEffects;
1951    postProcessParams.outVideoWidth = mOutputVideoWidth;
1952    postProcessParams.outVideoHeight = mOutputVideoHeight;
1953    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
1954    postProcessParams.renderingMode = mRenderingMode;
1955    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
1956        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
1957        mIsFiftiesEffectStarted = M4OSA_FALSE;
1958    }
1959    else {
1960       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
1961    }
1962
1963    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
1964    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
1965    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
1966    err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
1967
1968    return err;
1969}
1970
1971status_t PreviewPlayer::readFirstVideoFrame() {
1972    LOGV("PreviewPlayer::readFirstVideoFrame");
1973
1974    if (!mVideoBuffer) {
1975        MediaSource::ReadOptions options;
1976        if (mSeeking != NO_SEEK) {
1977            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
1978                    mSeekTimeUs / 1E6);
1979
1980            options.setSeekTo(
1981                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
1982        }
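        // Keep reading until a non-empty buffer at or after the seek time (or,
        // when not seeking, the playback begin-cut time) is obtained. A format
        // change re-initializes the renderer; any other error is treated as EOS.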
1983        for (;;) {
1984            status_t err = mVideoSource->read(&mVideoBuffer, &options);
1985            options.clearSeekTo();
1986
1987            if (err != OK) {
1988                CHECK_EQ(mVideoBuffer, NULL);
1989
1990                if (err == INFO_FORMAT_CHANGED) {
1991                    LOGV("LV PLAYER VideoSource signalled format change");
1992                    notifyVideoSize_l();
1993                    sp<MetaData> meta = mVideoSource->getFormat();
1994
1995                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
1996                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
1997
1998                    if (mVideoRenderer != NULL) {
1999                        mVideoRendererIsPreview = false;
2000                        err = initRenderer_l();
2001                        if (err != OK) {
2002                            postStreamDoneEvent_l(err);
2003                        }
2004                    }
2005                    continue;
2006                }
2007                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
2008                mFlags |= VIDEO_AT_EOS;
2009                mFlags |= AUDIO_AT_EOS;
2010                postStreamDoneEvent_l(err);
2011                return OK;
2012            }
2013
2014            if (mVideoBuffer->range_length() == 0) {
2015                // Some decoders, notably the PV AVC software decoder,
2016                // return spurious empty buffers that we simply ignore.
2017
2018                mVideoBuffer->release();
2019                mVideoBuffer = NULL;
2020                continue;
2021            }
2022
2023            int64_t videoTimeUs;
2024            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
2025            if (mSeeking != NO_SEEK) {
2026                if (videoTimeUs < mSeekTimeUs) {
2027                    // buffers are before seek time
2028                    // ignore them
2029                    mVideoBuffer->release();
2030                    mVideoBuffer = NULL;
2031                    continue;
2032                }
2033            } else {
2034                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
2035                    // buffers are before begin cut time
2036                    // ignore them
2037                    mVideoBuffer->release();
2038                    mVideoBuffer = NULL;
2039                    continue;
2040                }
2041            }
2042            break;
2043        }
2044    }
2045
2046    int64_t timeUs;
2047    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
2048
2049    {
2050        Mutex::Autolock autoLock(mMiscStateLock);
2051        mVideoTimeUs = timeUs;
2052    }
2053
2054    mDecodedVideoTs = timeUs;
2055
2056    return OK;
2057
2058}
2059
2060status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
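    // Storyboard-relative position in milliseconds: the decoded clip timestamp
    // plus the storyboard offset (both in microseconds, hence the division by
    // 1000), minus the playback begin-cut time.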
2061    *lastRenderedTimeMs = (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec);
2062    return OK;
2063}
2064
2065}  // namespace android
2066