PreviewPlayer.cpp revision 694816d7291f17364502ac5d3319684a0b180860
/*
 * Copyright (C) 2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#define LOG_NDEBUG 1
#define LOG_TAG "PreviewPlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "PreviewPlayer.h"
#include "DummyAudioSource.h"
#include "DummyVideoSource.h"
#include "VideoEditorSRC.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"


#include "PreviewRenderer.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>

namespace android {


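// PreviewPlayerEvent binds a PreviewPlayer member-function pointer to a
// TimedEventQueue event, so that handlers such as onVideoEvent() and
// onStreamDone() can be posted to, and fired from, the player's event queue.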
struct PreviewPlayerEvent : public TimedEventQueue::Event {
    PreviewPlayerEvent(
            PreviewPlayer *player,
            void (PreviewPlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~PreviewPlayerEvent() {}

    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    PreviewPlayer *mPlayer;
    void (PreviewPlayer::*mMethod)();

    PreviewPlayerEvent(const PreviewPlayerEvent &);
    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};


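// PreviewLocalRenderer wraps a PreviewRenderer and is constructed in two
// steps: the static factory initPreviewLocalRenderer() allocates the object
// and then calls init(), which creates the underlying PreviewRenderer. If
// that creation fails, the partially built renderer is deleted and NULL is
// returned to the caller.
//
// Typical use (see initRenderer_l() below):
//     mVideoRenderer = PreviewLocalRenderer::initPreviewLocalRenderer(
//             false /* previewOnly */, (OMX_COLOR_FORMATTYPE)format, mSurface,
//             mOutputVideoWidth, mOutputVideoHeight,
//             mOutputVideoWidth, mOutputVideoHeight);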
struct PreviewLocalRenderer : public PreviewPlayerRenderer {

    static PreviewLocalRenderer* initPreviewLocalRenderer (
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
    {
        PreviewLocalRenderer* mLocalRenderer = new
            PreviewLocalRenderer(
                previewOnly,
                colorFormat,
                surface,
                displayWidth, displayHeight,
                decodedWidth, decodedHeight,
                rotationDegrees);

        if ( mLocalRenderer->init(previewOnly,
                 colorFormat, surface,
                 displayWidth, displayHeight,
                 decodedWidth, decodedHeight,
                 rotationDegrees) != OK )
        {
            delete mLocalRenderer;
            return NULL;
        }
        return mLocalRenderer;
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }
    void render() {
        mTarget->renderYV12();
    }
    void getBuffer(uint8_t **data, size_t *stride) {
        mTarget->getBufferYV12(data, stride);
    }

protected:
    virtual ~PreviewLocalRenderer() {
        delete mTarget;
        mTarget = NULL;
    }

private:
    PreviewRenderer *mTarget;

    PreviewLocalRenderer(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
        : mTarget(NULL) {
    }


    int init(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0);

    PreviewLocalRenderer(const PreviewLocalRenderer &);
    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
};

int PreviewLocalRenderer::init(
        bool previewOnly,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {

    mTarget = PreviewRenderer::CreatePreviewRenderer (
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);
    if (mTarget == M4OSA_NULL) {
        return UNKNOWN_ERROR;
    }
    return OK;
}

PreviewPlayer::PreviewPlayer()
    : AwesomePlayer(),
      mCurrFramingEffectIndex(0),
      mReportedWidth(0),
      mReportedHeight(0),
      mFrameRGBBuffer(NULL),
      mFrameYUVBuffer(NULL) {

    mVideoRenderer = NULL;
    mLastVideoBuffer = NULL;
    mSuspensionState = NULL;
    mEffectsSettings = NULL;
    mVeAudioPlayer = NULL;
    mAudioMixStoryBoardTS = 0;
    mCurrentMediaBeginCutTime = 0;
    mCurrentMediaVolumeValue = 0;
    mNumberEffects = 0;
    mDecodedVideoTs = 0;
    mDecVideoTsStoryBoard = 0;
    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mProgressCbInterval = 0;
    mNumberDecVideoFrames = 0;
    mOverlayUpdateEventPosted = false;
    mIsChangeSourceRequired = true;

    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new PreviewPlayerEvent(this,
         &PreviewPlayer::onStreamDone);

    mStreamDoneEventPending = false;

    mCheckAudioStatusEvent = new PreviewPlayerEvent(
        this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    mProgressCbEvent = new PreviewPlayerEvent(this,
         &PreviewPlayer::onProgressCbEvent);

    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
        &PreviewPlayer::onUpdateOverlayEvent);
    mProgressCbEventPending = false;

    mOverlayUpdateEventPending = false;
    mResizedVideoBuffer = NULL;
    mVideoResizedOrCropped = false;
    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
    mIsFiftiesEffectStarted = false;
    reset();
}

PreviewPlayer::~PreviewPlayer() {

    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    if(mResizedVideoBuffer != NULL) {
        free((mResizedVideoBuffer->data()));
        mResizedVideoBuffer = NULL;
    }

    mVideoRenderer.clear();
    mVideoRenderer = NULL;
}

void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    mQueue.cancelEvent(mProgressCbEvent->eventID());
    mProgressCbEventPending = false;
}

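// Data sources can be set in three ways: from a URI (the heavy lifting is
// deferred to preparation), from an already opened MediaExtractor (one audio
// and one video track are selected), or from a still image via
// setDataSource_l_jpg(), which builds dummy audio/video sources.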
status_t PreviewPlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t PreviewPlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.
    return OK;
}

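// Pick the first audio and the first video track exposed by the extractor.
// Clips with no audio track are given a DummyAudioSource covering the clip's
// play window so the rest of the player can treat audio as always present.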
status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
                        && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    /* Add support for dummy audio */
    if( !haveAudio ){
        LOGV("PreviewPlayer: setDataSource_l dummy audio creation started");

        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                                              ((mPlayEndTimeMsec)*1000LL));
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiosource created");
        if(mAudioTrack != NULL) {
            haveAudio = true;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();
    return OK;
}

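// Still-image (JPEG) clips are previewed by pairing a DummyAudioSource with a
// DummyVideoSource built from the image URI; the clip duration is derived
// from mPlayBeginTimeMsec and mPlayEndTimeMsec.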
status_t PreviewPlayer::setDataSource_l_jpg() {
    LOGV("PreviewPlayer: setDataSource_l_jpg started");

    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
                                          ((mPlayEndTimeMsec)*1000LL));
    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
    if(mAudioSource != NULL) {
        setAudioSource(mAudioSource);
    }
    status_t error = mAudioSource->start();
    if (error != OK) {
        LOGV("Error starting dummy audio source");
        mAudioSource.clear();
        return error;
    }

    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000LL;

    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
                                            mDurationUs, mUri);
    mReportedWidth = mVideoWidth;
    mReportedHeight = mVideoHeight;

    setVideoSource(mVideoSource);
    status_t err1 = mVideoSource->start();
    if (err1 != OK) {
        mVideoSource.clear();
        return err1;
    }

    mIsVideoSourceJpg = true;
    return OK;
}

void PreviewPlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

void PreviewPlayer::reset_l() {

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned.
    // If we did this later, audio would continue playing while we
    // shut down the video-related resources and the player would not
    // appear as responsive to a reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    // A single audio player instance is shared, so do not delete it here;
    // it is deleted by the PreviewController class.
    //delete mAudioPlayer;
    mAudioPlayer = NULL;

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = NO_SEEK;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mIsVideoSourceJpg = false;
    mFrameRGBBuffer = NULL;
    if(mFrameYUVBuffer != NULL) {
        free(mFrameYUVBuffer);
        mFrameYUVBuffer = NULL;
    }
}

status_t PreviewPlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

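// Starts or resumes the audio player. The audio MediaSource has already been
// started elsewhere, so the player is started with sourceAlreadyStarted=true.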
status_t PreviewPlayer::startAudioPlayer_l() {
    CHECK(!(mFlags & AUDIO_RUNNING));

    if (mAudioSource == NULL || mAudioPlayer == NULL) {
        return OK;
    }

    if (!(mFlags & AUDIOPLAYER_STARTED)) {
        mFlags |= AUDIOPLAYER_STARTED;

        // We've already started the MediaSource in order to enable
        // the prefetcher to read its data.
        status_t err = mVeAudioPlayer->start(
                true /* sourceAlreadyStarted */);

        if (err != OK) {
            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
            return err;
        }
    } else {
        mVeAudioPlayer->resume();
    }

    mFlags |= AUDIO_RUNNING;

    mWatchForAudioEOS = true;

    return OK;
}

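// A single VideoEditorAudioPlayer instance is shared across clips by the
// preview controller. This decides whether the player must switch to the new
// clip's audio source; when both the old and the new sources are dummy
// (silent) sources, the existing dummy source is kept and only its duration
// is extended, and the newly created source is stopped.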
status_t PreviewPlayer::setAudioPlayer(AudioPlayer *audioPlayer) {
    Mutex::Autolock autoLock(mLock);
    CHECK(!(mFlags & PLAYING));
    mAudioPlayer = audioPlayer;

    LOGV("SetAudioPlayer");
    mIsChangeSourceRequired = true;
    mVeAudioPlayer =
            (VideoEditorAudioPlayer*)mAudioPlayer;

    // Check if the new and old sources are dummy
    sp<MediaSource> anAudioSource = mVeAudioPlayer->getSource();
    if (anAudioSource == NULL) {
        // Audio player does not have any source set.
        LOGV("setAudioPlayer: Audio player does not have any source set");
        return OK;
    }

    // If the new video source is not dummy, then always change the source.
    // Else the audio player continues using the old audio source and there
    // are frame drops to maintain AV sync.
    sp<MetaData> meta;
    if (mVideoSource != NULL) {
        meta = mVideoSource->getFormat();
        const char *pVidSrcType;
        if (meta->findCString(kKeyDecoderComponent, &pVidSrcType)) {
            if (strcmp(pVidSrcType, "DummyVideoSource") != 0) {
                LOGV(" Video clip with silent audio; need to change source");
                return OK;
            }
        }
    }

    const char *pSrcType1;
    const char *pSrcType2;
    meta = anAudioSource->getFormat();

    if (meta->findCString(kKeyDecoderComponent, &pSrcType1)) {
        if (strcmp(pSrcType1, "DummyAudioSource") == 0) {
            meta = mAudioSource->getFormat();
            if (meta->findCString(kKeyDecoderComponent, &pSrcType2)) {
                if (strcmp(pSrcType2, "DummyAudioSource") == 0) {
                    mIsChangeSourceRequired = false;
                    // Just set the new play duration for the existing source
                    MediaSource *pMediaSrc = anAudioSource.get();
                    DummyAudioSource *pDummyAudioSource = (DummyAudioSource*)pMediaSrc;
                    // Increment the duration of the audio source
                    pDummyAudioSource->setDuration(
                        (int64_t)((mPlayEndTimeMsec)*1000LL));

                    // Stop the new audio source
                    // since we continue using the old source
                    LOGV("setAudioPlayer: stop new audio source");
                    mAudioSource->stop();
                }
            }
        }
    }

    return OK;
}

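// Called when either stream reports end-of-stream (or an error). Errors are
// reported to the listener and playback pauses at EOS; once both audio and
// video are done, the player either loops back to 0 or signals
// MEDIA_PLAYBACK_COMPLETE.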
void PreviewPlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.

    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
        return;
    }

    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & (LOOPING | AUTO_LOOPING)) {
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        // Pause before sending the event
        pause_l(true /* at eos */);

        // This lock is used to synchronize onStreamDone() in PreviewPlayer
        // and stopPreview() in PreviewController
        Mutex::Autolock autoLock(mLockControl);
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        mFlags |= AT_EOS;
        LOGV("onStreamDone end");
        return;
    }
}


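// play_l() either creates a VideoEditorAudioPlayer on first use (wiring in
// the audio mix settings and storyboard skim timestamps) or reuses the shared
// player, changing its source only when mIsChangeSourceRequired is set. The
// audio player, when present, also serves as the playback time source.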
status_t PreviewPlayer::play_l() {

    mFlags &= ~SEEK_PREVIEW;

    if (mFlags & PLAYING) {
        return OK;
    }
    mStartNextPlayer = false;

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {

                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
                mVeAudioPlayer =
                          (VideoEditorAudioPlayer*)mAudioPlayer;

                mAudioPlayer->setSource(mAudioSource);

                mVeAudioPlayer->setAudioMixSettings(
                 mPreviewPlayerAudioMixSettings);

                mVeAudioPlayer->setAudioMixPCMFileHandle(
                 mAudioMixPCMFileHandle);

                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                 mCurrentMediaVolumeValue);

                mFlags |= AUDIOPLAYER_STARTED;
                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mVeAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    //delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);
                    return err;
                }

                mTimeSource = mVeAudioPlayer;
                mFlags |= AUDIO_RUNNING;
                deferredAudioSeek = true;
                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mVeAudioPlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
            bool isAudioPlayerStarted = mVeAudioPlayer->isStarted();

            if (mIsChangeSourceRequired == true) {
                LOGV("play_l: Change audio source required");

                if (isAudioPlayerStarted == true) {
                    mVeAudioPlayer->pause();
                }

                mVeAudioPlayer->setSource(mAudioSource);
                mVeAudioPlayer->setObserver(this);

                mVeAudioPlayer->setAudioMixSettings(
                 mPreviewPlayerAudioMixSettings);

                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                    mCurrentMediaVolumeValue);

                if (isAudioPlayerStarted == true) {
                    mVeAudioPlayer->resume();
                } else {
                    status_t err = OK;
                    err = mVeAudioPlayer->start(true);
                    if (err != OK) {
                        mAudioPlayer = NULL;
                        mVeAudioPlayer = NULL;

                        mFlags &= ~(PLAYING | FIRST_FRAME);
                        return err;
                    }
                }
            } else {
                LOGV("play_l: No Source change required");
                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                    mCurrentMediaVolumeValue);

                mVeAudioPlayer->resume();
            }

            mFlags |= AUDIOPLAYER_STARTED;
            mFlags |= AUDIO_RUNNING;
            mTimeSource = mVeAudioPlayer;
            deferredAudioSeek = true;
            mWatchForAudioSeekComplete = false;
            mWatchForAudioEOS = true;
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    // Set the seek option for image source files and read.
    // This resets the timestamping for image play
    if (mIsVideoSourceJpg) {
        MediaSource::ReadOptions options;
        MediaBuffer *aLocalBuffer;
        options.setSeekTo(mSeekTimeUs);
        mVideoSource->read(&aLocalBuffer, &options);
        aLocalBuffer->release();
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}


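// Creates the local software renderer for the output surface. A local
// renderer is always used here because the decoded buffers are modified by
// the post-processing (effects) module before display.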
status_t PreviewPlayer::initRenderer_l() {
    if (mSurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        // Always use the local renderer since decoded buffers are modified
        // by the postprocessing module.
        // Other decoders are instantiated locally and as a consequence
        // allocate their buffers in local address space.
        if(mVideoRenderer == NULL) {

            mVideoRenderer = PreviewLocalRenderer::initPreviewLocalRenderer(
                false,  // previewOnly
                (OMX_COLOR_FORMATTYPE)format,
                mSurface,
                mOutputVideoWidth, mOutputVideoHeight,
                mOutputVideoWidth, mOutputVideoHeight);

            if (mVideoRenderer == NULL) {
                return UNKNOWN_ERROR;
            }
            return OK;
        }
    }
    return OK;
}


status_t PreviewPlayer::seekTo(int64_t timeUs) {

    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}


status_t PreviewPlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}


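// Raw PCM tracks are used directly; all other audio formats are decoded with
// OMXCodec and then wrapped in a VideoEditorSRC resampler. QCELP clips
// without a decoder are tolerated and playback continues without audio.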
status_t PreviewPlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        sp<MediaSource> aRawSource;
        aRawSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);

        if(aRawSource != NULL) {
            LOGV("initAudioDecoder: new VideoEditorSRC");
            mAudioSource = new VideoEditorSRC(aRawSource);
        }
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}


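// Instantiates the OMX video decoder for the selected video track and records
// the clip duration and the reported frame dimensions.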
status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        mReportedWidth = mVideoWidth;
        mReportedHeight = mVideoHeight;

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}


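// Main per-frame handler, fired from the event queue. It reads the next
// decoded frame (honouring pending seeks and the clip's begin/end cut times),
// performs A/V sync against the audio clock (dropping frames more than 40 ms
// late, re-posting frames more than 25 ms early), applies any active video
// effects and overlay/framing updates, and finally renders the frame.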
void PreviewPlayer::onVideoEvent() {
    uint32_t i=0;
    bool bAppliedVideoEffect = false;
    M4OSA_ERR err1 = M4NO_ERROR;
    int64_t imageFrameTimeUs = 0;

    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mFlags & SEEK_PREVIEW) {
        mFlags &= ~SEEK_PREVIEW;
        return;
    }

    TimeSource *ts_st = &mSystemTimeSource;
    int64_t timeStartUs = ts_st->getRealTimeUs();

    if (mSeeking != NO_SEEK) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }


        if(mAudioSource != NULL) {

            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
                mAudioPlayer->pause();
                mFlags &= ~AUDIO_RUNNING;
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking != NO_SEEK) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                                                      mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        err = initRenderer_l();
                        if (err != OK) {
                            postStreamDoneEvent_l(err);
                        }

                    }
                    continue;
                }
                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied to the audio track
                if (mSeeking != NO_SEEK) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                mFlags |= AUDIO_AT_EOS;
                mOverlayUpdateEventPosted = false;
                postStreamDoneEvent_l(err);
                // Set the last decoded timestamp to duration
                mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if (mSeeking != NO_SEEK) {
                if (videoTimeUs < mSeekTimeUs) {
                    // Buffers are before the seek time; ignore them
                    mVideoBuffer->release();
                    mVideoBuffer = NULL;
                    continue;
                }
            } else {
                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                    // Frames are before the begin cut time;
                    // do not render
                    mVideoBuffer->release();
                    mVideoBuffer = NULL;
                    continue;
                }
            }
            break;
        }
    }

    mNumberDecVideoFrames++;

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }


    if(!mStartNextPlayer) {
        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000LL) - timeUs;
        if(playbackTimeRemaining <= 1500000) {
            // When less than 1.5 sec of playback is left,
            // send a notification to start the next player

            mStartNextPlayer = true;
            notifyListener_l(0xAAAAAAAA);
        }
    }

    SeekType wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);
    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
        status_t err = startAudioPlayer_l();
        if (err != OK) {
            LOGE("Starting the audio player failed w/ err %d", err);
            return;
        }
    }

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if(ts == NULL) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        return;
    }

    if(!mIsVideoSourceJpg) {
        if (mFlags & FIRST_FRAME) {
            mFlags &= ~FIRST_FRAME;

            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
        }

        int64_t realTimeUs, mediaTimeUs;
        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
        }

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        if (wasSeeking != NO_SEEK) {
            // Let's display the first frame after seeking right away.
            latenessUs = 0;
        }
        LOGV("Audio time stamp = %lld and video time stamp = %lld",
                                            ts->getRealTimeUs(),timeUs);
        if (latenessUs > 40000) {
            // We're more than 40ms late.

            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                                           latenessUs, latenessUs / 1E6);

            mVideoBuffer->release();
            mVideoBuffer = NULL;
            postVideoEvent_l(0);
            return;
        }

        if (latenessUs < -25000) {
            // We're more than 25ms early.
            LOGV("We're more than 25ms early, lateness %lld", latenessUs);

            postVideoEvent_l(25000);
            return;
        }
    }
    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        status_t err = initRenderer_l();
        if (err != OK) {
            postStreamDoneEvent_l(err);
        }
    }

    // If the timestamp exceeds the endCutTime of the clip, do not render
    if((timeUs/1000) > mPlayEndTimeMsec) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }
        mLastVideoBuffer = mVideoBuffer;
        mVideoBuffer = NULL;
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
        mOverlayUpdateEventPosted = false;
        // Set the last decoded timestamp to duration
        mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
        return;
    }
    // Capture the frame timestamp to be rendered
    mDecodedVideoTs = timeUs;

1147    // Post processing to apply video effects
1148    for(i=0;i<mNumberEffects;i++) {
1149        // First check if effect starttime matches the clip being previewed
1150        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
1151        (mEffectsSettings[i].uiStartTime >=
1152         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
1153        {
1154            // This effect doesn't belong to this clip, check next one
1155            continue;
1156        }
1157        // Check if effect applies to this particular frame timestamp
1158        if((mEffectsSettings[i].uiStartTime <=
1159         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
1160            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
1161             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
1162              && (mEffectsSettings[i].uiDuration != 0)) {
1163            setVideoPostProcessingNode(
1164             mEffectsSettings[i].VideoEffectType, TRUE);
1165        }
1166        else {
1167            setVideoPostProcessingNode(
1168             mEffectsSettings[i].VideoEffectType, FALSE);
1169        }
1170    }
1171
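    // Framing overlays are signalled to the listener through overlay-update
    // events (see onUpdateOverlayEvent()) rather than being drawn into the
    // frame by this player.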
    // Provide the overlay update indication when there is a framing (overlay) effect
    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; // Never apply framing here.
1175        if (!mOverlayUpdateEventPosted) {
1176            // Find the effect in effectSettings array
1177            M4OSA_UInt32 index;
1178            for (index = 0; index < mNumberEffects; index++) {
1179                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
1180                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
1181                if(mEffectsSettings[index].VideoEffectType ==
1182                    (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
1183                    if (((mEffectsSettings[index].uiStartTime + 1) <=
1184                        timeMs + timeOffset - mPlayBeginTimeMsec) &&
1185                        ((mEffectsSettings[index].uiStartTime - 1 +
1186                        mEffectsSettings[index].uiDuration) >=
1187                        timeMs + timeOffset - mPlayBeginTimeMsec))
1188                    {
1189                        break;
1190                    }
1191                }
1192            }
1193            if (index < mNumberEffects) {
1194                mCurrFramingEffectIndex = index;
1195                mOverlayUpdateEventPosted = true;
1196                postOverlayUpdateEvent_l();
1197                LOGV("Framing index = %d", mCurrFramingEffectIndex);
1198            } else {
1199                LOGV("No framing effects found");
1200            }
1201        }
1202
1203    } else if (mOverlayUpdateEventPosted) {
        // Post the event when the overlay is no longer valid
1205        LOGV("Overlay is Done");
1206        mOverlayUpdateEventPosted = false;
1207        postOverlayUpdateEvent_l();
1208    }
1209
1210
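    // Apply any active color/transition effects into the renderer's buffer via
    // doVideoPostProcessing(); when no effect is active, doMediaRendering() only
    // resizes or crops the frame according to the current rendering mode.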
1211    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
1212        err1 = doVideoPostProcessing();
1213        if(err1 != M4NO_ERROR) {
1214            LOGE("doVideoPostProcessing returned err");
1215            bAppliedVideoEffect = false;
1216        }
1217        else {
1218            bAppliedVideoEffect = true;
1219        }
1220    }
1221    else {
1222        bAppliedVideoEffect = false;
1223        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
1224            // No effects to be applied, but media rendering to be done
1225            err1 = doMediaRendering();
1226            if(err1 != M4NO_ERROR) {
1227                LOGE("doMediaRendering returned err");
1228                //Use original mVideoBuffer for rendering
1229                mVideoResizedOrCropped = false;
1230            }
1231        }
1232    }
1233
1234    if (mVideoRenderer != NULL) {
1235        LOGV("mVideoRenderer CALL render()");
1236        mVideoRenderer->render();
1237    }
1238
1239    if (mLastVideoBuffer) {
1240        mLastVideoBuffer->release();
1241        mLastVideoBuffer = NULL;
1242    }
1243
1244    mLastVideoBuffer = mVideoBuffer;
1245    mVideoBuffer = NULL;
1246
1247    // Post progress callback based on callback interval set
1248    if(mNumberDecVideoFrames >= mProgressCbInterval) {
1249        postProgressCallbackEvent_l();
1250        mNumberDecVideoFrames = 0;  // reset counter
1251    }
1252
    // If the clip's end cut time has been reached, post an EOS event
1254    if((timeUs/1000) >= mPlayEndTimeMsec) {
1255        LOGV("PreviewPlayer: onVideoEvent EOS.");
1256        mFlags |= VIDEO_AT_EOS;
1257        mFlags |= AUDIO_AT_EOS;
1258        mOverlayUpdateEventPosted = false;
        // Clamp the last decoded timestamp to the clip's end cut time
1260        mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
1261        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
1262    }
1263    else {
1264        if ((wasSeeking != NO_SEEK) && (mFlags & SEEK_PREVIEW)) {
1265            mFlags &= ~SEEK_PREVIEW;
1266            return;
1267        }
1268
1269        if(!mIsVideoSourceJpg) {
1270            postVideoEvent_l(0);
1271        }
1272        else {
1273            postVideoEvent_l(33000);
1274        }
1275    }
1276}
1277
1278status_t PreviewPlayer::prepare() {
1279    Mutex::Autolock autoLock(mLock);
1280    return prepare_l();
1281}
1282
1283status_t PreviewPlayer::prepare_l() {
1284    if (mFlags & PREPARED) {
1285        return OK;
1286    }
1287
1288    if (mFlags & PREPARING) {
1289        return UNKNOWN_ERROR;
1290    }
1291
1292    mIsAsyncPrepare = false;
1293    status_t err = prepareAsync_l();
1294
1295    if (err != OK) {
1296        return err;
1297    }
1298
1299    while (mFlags & PREPARING) {
1300        mPreparedCondition.wait(mLock);
1301    }
1302
1303    return mPrepareResult;
1304}
1305
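// Queues an asynchronous prepare event; the actual work (data source setup and
// decoder initialization) happens in onPrepareAsyncEvent(). prepare_l() waits
// on mPreparedCondition until finishAsyncPrepare_l() signals completion.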
1306status_t PreviewPlayer::prepareAsync_l() {
1307    if (mFlags & PREPARING) {
1308        return UNKNOWN_ERROR;  // async prepare already pending
1309    }
1310
1311    if (!mQueueStarted) {
1312        mQueue.start();
1313        mQueueStarted = true;
1314    }
1315
1316    mFlags |= PREPARING;
1317    mAsyncPrepareEvent = new PreviewPlayerEvent(
1318            this, &PreviewPlayer::onPrepareAsyncEvent);
1319
1320    mQueue.postEvent(mAsyncPrepareEvent);
1321
1322    return OK;
1323}
1324
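// Creates a DataSource from the stored URI. ".rgb" files bypass the extractor
// and are handled by setDataSource_l_jpg(); other files are opened through an
// MPEG-4 extractor.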
1325status_t PreviewPlayer::finishSetDataSource_l() {
1326    sp<DataSource> dataSource;
1327    sp<MediaExtractor> extractor;
1328
1329    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
1330
1331    if (dataSource == NULL) {
1332        return UNKNOWN_ERROR;
1333    }
1334
    // If the file type is .rgb, there is no need to create an extractor
    int uriLen = strlen(mUri);
    int startOffset = uriLen - 4;
    if (uriLen >= 4 && !strncasecmp(mUri + startOffset, ".rgb", 4)) {
1339        extractor = NULL;
1340    }
1341    else {
1342        extractor = MediaExtractor::Create(dataSource,
1343                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
1344    }
1345
1346    if (extractor == NULL) {
1347        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
1348        return setDataSource_l_jpg();
1349    }
1350
1351    return setDataSource_l(extractor);
1352}
1353
1354
1355// static
1356bool PreviewPlayer::ContinuePreparation(void *cookie) {
1357    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
1358
1359    return (me->mFlags & PREPARE_CANCELLED) == 0;
1360}
1361
1362void PreviewPlayer::onPrepareAsyncEvent() {
1363    Mutex::Autolock autoLock(mLock);
1364    LOGV("onPrepareAsyncEvent");
1365
1366    if (mFlags & PREPARE_CANCELLED) {
1367        LOGV("LV PLAYER prepare was cancelled before doing anything");
1368        abortPrepare(UNKNOWN_ERROR);
1369        return;
1370    }
1371
1372    if (mUri.size() > 0) {
1373        status_t err = finishSetDataSource_l();
1374
1375        if (err != OK) {
1376            abortPrepare(err);
1377            return;
1378        }
1379    }
1380
1381    if (mVideoTrack != NULL && mVideoSource == NULL) {
1382        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
1383
1384        if (err != OK) {
1385            abortPrepare(err);
1386            return;
1387        }
1388    }
1389
1390    if (mAudioTrack != NULL && mAudioSource == NULL) {
1391        status_t err = initAudioDecoder();
1392
1393        if (err != OK) {
1394            abortPrepare(err);
1395            return;
1396        }
1397    }
1398    finishAsyncPrepare_l();
1399
1400}
1401
1402void PreviewPlayer::finishAsyncPrepare_l() {
1403    if (mIsAsyncPrepare) {
1404        if (mVideoSource == NULL) {
1405            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
1406            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
1407        } else {
1408            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
1409            notifyVideoSize_l();
1410        }
1411        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
1412        notifyListener_l(MEDIA_PREPARED);
1413    }
1414
1415    mPrepareResult = OK;
1416    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
1417    mFlags |= PREPARED;
1418    mAsyncPrepareEvent = NULL;
1419    mPreparedCondition.broadcast();
1420}
1421
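// Saves the current playback state (URI, headers, flags, position) and, when
// readable, a copy of the last decoded video frame, then resets the player.
// resume() later restores this state and re-displays the saved frame.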
1422status_t PreviewPlayer::suspend() {
1423    LOGV("suspend");
1424    Mutex::Autolock autoLock(mLock);
1425
1426    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            // We get here if the player is suspended again after a resume
            // without any frame having been played in between.
1431            SuspensionState *state = mSuspensionState;
1432            mSuspensionState = NULL;
1433            reset_l();
1434            mSuspensionState = state;
1435            return OK;
1436        }
1437
1438        delete mSuspensionState;
1439        mSuspensionState = NULL;
1440    }
1441
1442    if (mFlags & PREPARING) {
1443        mFlags |= PREPARE_CANCELLED;
1444    }
1445
1446    while (mFlags & PREPARING) {
1447        mPreparedCondition.wait(mLock);
1448    }
1449
1450    SuspensionState *state = new SuspensionState;
1451    state->mUri = mUri;
1452    state->mUriHeaders = mUriHeaders;
1453    state->mFileSource = mFileSource;
1454
1455    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
1456    getPosition(&state->mPositionUs);
1457
1458    if (mLastVideoBuffer) {
1459        size_t size = mLastVideoBuffer->range_length();
1460        if (size) {
1461            int32_t unreadable;
1462            if (!mLastVideoBuffer->meta_data()->findInt32(
1463                        kKeyIsUnreadable, &unreadable)
1464                    || unreadable == 0) {
1465                state->mLastVideoFrameSize = size;
1466                state->mLastVideoFrame = malloc(size);
1467                memcpy(state->mLastVideoFrame,
1468                   (const uint8_t *)mLastVideoBuffer->data()
1469                        + mLastVideoBuffer->range_offset(),
1470                   size);
1471
1472                state->mVideoWidth = mVideoWidth;
1473                state->mVideoHeight = mVideoHeight;
1474
1475                sp<MetaData> meta = mVideoSource->getFormat();
1476                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
1477                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
1478                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
1479            } else {
1480                LOGV("Unable to save last video frame, we have no access to "
1481                     "the decoded video data.");
1482            }
1483        }
1484    }
1485
1486    reset_l();
1487
1488    mSuspensionState = state;
1489
1490    return OK;
1491}
1492
1493void PreviewPlayer::acquireLock() {
1494    LOGV("acquireLock");
1495    mLockControl.lock();
1496}
1497
1498void PreviewPlayer::releaseLock() {
1499    LOGV("releaseLock");
1500    mLockControl.unlock();
1501}
1502
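// Restores the state captured by suspend(): re-attaches the data source, seeks
// back to the saved position, re-renders the saved last frame through a
// preview-only local renderer, and resumes playback if it was playing.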
1503status_t PreviewPlayer::resume() {
1504    LOGV("resume");
1505    Mutex::Autolock autoLock(mLock);
1506
1507    if (mSuspensionState == NULL) {
1508        return INVALID_OPERATION;
1509    }
1510
1511    SuspensionState *state = mSuspensionState;
1512    mSuspensionState = NULL;
1513
1514    status_t err;
1515    if (state->mFileSource != NULL) {
1516        err = AwesomePlayer::setDataSource_l(state->mFileSource);
1517
1518        if (err == OK) {
1519            mFileSource = state->mFileSource;
1520        }
1521    } else {
1522        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
1523    }
1524
1525    if (err != OK) {
1526        delete state;
1527        state = NULL;
1528
1529        return err;
1530    }
1531
1532    seekTo_l(state->mPositionUs);
1533
1534    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
1535
1536    if (state->mLastVideoFrame && (mSurface != NULL)) {
1537        mVideoRenderer =
1538            PreviewLocalRenderer::initPreviewLocalRenderer(
1539                    true,  // previewOnly
1540                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
1541                    mSurface,
1542                    state->mVideoWidth,
1543                    state->mVideoHeight,
1544                    state->mDecodedWidth,
1545                    state->mDecodedHeight);
1546
1547        mVideoRendererIsPreview = true;
1548
1549        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
1550                state->mLastVideoFrame, state->mLastVideoFrameSize);
1551    }
1552
1553    if (state->mFlags & PLAYING) {
1554        play_l();
1555    }
1556
1557    mSuspensionState = state;
1558    state = NULL;
1559
1560    return OK;
1561}
1562
1563
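// A rough sketch of how the upper layer is expected to configure a clip before
// preview (assumed ordering; the actual call site is the preview controller and
// may differ):
//
//     player->loadEffectsSettings(effects, nEffects);
//     player->setMediaRenderingMode(renderingMode, outputVideoSize);
//     player->setPlaybackBeginTime(beginCutMs);
//     player->setPlaybackEndTime(endCutMs);
//     player->setStoryboardStartTime(storyboardStartMs);
//     player->setProgressCallbackInterval(cbIntervalFrames);
//     player->prepare();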
1564status_t PreviewPlayer::loadEffectsSettings(
1565                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
1568
1569    mNumberEffects = nEffects;
1570    mEffectsSettings = pEffectSettings;
1571    return OK;
1572}
1573
1574status_t PreviewPlayer::loadAudioMixSettings(
1575                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
1576
1577    LOGV("PreviewPlayer: loadAudioMixSettings: ");
1578    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
1579    return OK;
1580}
1581
1582status_t PreviewPlayer::setAudioMixPCMFileHandle(
1583                    M4OSA_Context pAudioMixPCMFileHandle) {
1584
1585    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
1586    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
1587    return OK;
1588}
1589
1590status_t PreviewPlayer::setAudioMixStoryBoardParam(
1591                    M4OSA_UInt32 audioMixStoryBoardTS,
1592                    M4OSA_UInt32 currentMediaBeginCutTime,
1593                    M4OSA_UInt32 primaryTrackVolValue ) {
1594
1595    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
1596    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
1597    mCurrentMediaVolumeValue = primaryTrackVolValue;
1598    return OK;
1599}
1600
1601status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
1602
1603    mPlayBeginTimeMsec = msec;
1604    return OK;
1605}
1606
1607status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
1608
1609    mPlayEndTimeMsec = msec;
1610    return OK;
1611}
1612
1613status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
1614
1615    mStoryboardStartTimeMsec = msec;
1616    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000LL;
1617    return OK;
1618}
1619
1620status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
1621
1622    mProgressCbInterval = cbInterval;
1623    return OK;
1624}
1625
1626
1627status_t PreviewPlayer::setMediaRenderingMode(
1628        M4xVSS_MediaRendering mode,
1629        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
1630
1631    mRenderingMode = mode;
1632
    /* Reset the resize/crop flag for each clip */
1634    mVideoResizedOrCropped = false;
1635
1636    switch(outputVideoSize) {
1637        case M4VIDEOEDITING_kSQCIF:
1638            mOutputVideoWidth = 128;
1639            mOutputVideoHeight = 96;
1640            break;
1641
1642        case M4VIDEOEDITING_kQQVGA:
1643            mOutputVideoWidth = 160;
1644            mOutputVideoHeight = 120;
1645            break;
1646
1647        case M4VIDEOEDITING_kQCIF:
1648            mOutputVideoWidth = 176;
1649            mOutputVideoHeight = 144;
1650            break;
1651
1652        case M4VIDEOEDITING_kQVGA:
1653            mOutputVideoWidth = 320;
1654            mOutputVideoHeight = 240;
1655            break;
1656
1657        case M4VIDEOEDITING_kCIF:
1658            mOutputVideoWidth = 352;
1659            mOutputVideoHeight = 288;
1660            break;
1661
1662        case M4VIDEOEDITING_kVGA:
1663            mOutputVideoWidth = 640;
1664            mOutputVideoHeight = 480;
1665            break;
1666
1667        case M4VIDEOEDITING_kWVGA:
1668            mOutputVideoWidth = 800;
1669            mOutputVideoHeight = 480;
1670            break;
1671
1672        case M4VIDEOEDITING_kNTSC:
1673            mOutputVideoWidth = 720;
1674            mOutputVideoHeight = 480;
1675            break;
1676
1677        case M4VIDEOEDITING_k640_360:
1678            mOutputVideoWidth = 640;
1679            mOutputVideoHeight = 360;
1680            break;
1681
1682        case M4VIDEOEDITING_k854_480:
1683            mOutputVideoWidth = 854;
1684            mOutputVideoHeight = 480;
1685            break;
1686
1687        case M4VIDEOEDITING_kHD1280:
1688            mOutputVideoWidth = 1280;
1689            mOutputVideoHeight = 720;
1690            break;
1691
1692        case M4VIDEOEDITING_kHD1080:
1693            mOutputVideoWidth = 1080;
1694            mOutputVideoHeight = 720;
1695            break;
1696
1697        case M4VIDEOEDITING_kHD960:
1698            mOutputVideoWidth = 960;
1699            mOutputVideoHeight = 720;
1700            break;
1701
1702        default:
1703            LOGE("unsupported output video size set");
1704            return BAD_VALUE;
1705    }
1706
1707    return OK;
1708}
1709
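// Converts the decoded YUV420 planar frame into the renderer's YV12 output
// buffer, applying the current rendering mode (resizing, cropping, or black
// borders) via applyRenderingMode().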
1710M4OSA_ERR PreviewPlayer::doMediaRendering() {
1711    M4OSA_ERR err = M4NO_ERROR;
1712    M4VIFI_ImagePlane planeIn[3], planeOut[3];
    M4VIFI_UInt8 *inBuffer = M4OSA_NULL;
    size_t videoBufferSize = 0;
    M4OSA_UInt32 frameSize = 0, index = 0, bufferOffset = 0;
1717    int32_t colorFormat = 0;
1718
1719    if(!mIsVideoSourceJpg) {
1720        sp<MetaData> meta = mVideoSource->getFormat();
1721        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1722    }
1723    else {
1724        colorFormat = OMX_COLOR_FormatYUV420Planar;
1725    }
1726
1727    videoBufferSize = mVideoBuffer->size();
1728    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
1729
1730    uint8_t* outBuffer;
1731    size_t outBufferStride = 0;
1732
1733    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
1734
1735    bufferOffset = index*frameSize;
1736    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
1737                mVideoBuffer->range_offset()+bufferOffset;
1738
1739
1740    /* In plane*/
1741    prepareYUV420ImagePlane(planeIn, mVideoWidth,
1742      mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
1743
    // Set up the output YUV420 planes to be compatible with the YV12 format:
    //  - width and height must be even
    //  - plane order is YVU instead of YUV
    //  - buffers are aligned on 32 bits
1748
1749    //In YV12 format, sizes must be even
1750    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
1751    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
1752
1753    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
1754     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
1755
1756
1757    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
1758
1759    if(err != M4NO_ERROR)
1760    {
1761        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", (int)err);
1762        return err;
1763    }
1764    mVideoResizedOrCropped = true;
1765
1766    return err;
1767}
1768
1769status_t PreviewPlayer::resetJniCallbackTimeStamp() {
1770
1771    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000LL;
1772    return OK;
1773}
1774
1775void PreviewPlayer::postProgressCallbackEvent_l() {
1776    if (mProgressCbEventPending) {
1777        return;
1778    }
1779    mProgressCbEventPending = true;
1780
1781    mQueue.postEvent(mProgressCbEvent);
1782}
1783
1784
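// Reports playback progress (MEDIA_INFO) relative to the storyboard:
// (decoded timestamp + storyboard offset) in milliseconds minus the clip's
// begin cut time. While frames before the begin cut time are still being
// decoded, the storyboard start time is reported instead.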
1785void PreviewPlayer::onProgressCbEvent() {
1786    Mutex::Autolock autoLock(mLock);
1787    if (!mProgressCbEventPending) {
1788        return;
1789    }
1790    mProgressCbEventPending = false;
    // If playback started from an earlier I-frame and the decoded timestamp is
    // still before the begin cut time, report the storyboard start time instead.
1793    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
1794        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
1795    }
1796    else {
1797        notifyListener_l(MEDIA_INFO, 0,
1798        (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
1799    }
1800}
1801
1802void PreviewPlayer::postOverlayUpdateEvent_l() {
1803    if (mOverlayUpdateEventPending) {
1804        return;
1805    }
1806    mOverlayUpdateEventPending = true;
1807    mQueue.postEvent(mOverlayUpdateEvent);
1808}
1809
1810void PreviewPlayer::onUpdateOverlayEvent() {
1811    Mutex::Autolock autoLock(mLock);
1812
1813    if (!mOverlayUpdateEventPending) {
1814        return;
1815    }
1816    mOverlayUpdateEventPending = false;
1817
    // Report whether the framing overlay became active (1) or inactive (0),
    // along with the index of the framing effect, using the private
    // overlay-update message code.
    int updateState = mOverlayUpdateEventPosted ? 1 : 0;
    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
1825}
1826
1827
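// Translates an M4VSS3GPP/M4xVSS video effect type into the player's internal
// effect bitmask and sets or clears the corresponding bit in
// mCurrentVideoEffect.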
1828void PreviewPlayer::setVideoPostProcessingNode(
1829                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
1830
1831    uint32_t effect = VIDEO_EFFECT_NONE;
1832
    // Map the M4VSS3GPP/M4xVSS effect type to the local effect bitmask value
1834    switch(type) {
1835        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1836            effect = VIDEO_EFFECT_FADEFROMBLACK;
1837            break;
1838
1839        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1840            effect = VIDEO_EFFECT_FADETOBLACK;
1841            break;
1842
1843        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1844            effect = VIDEO_EFFECT_CURTAINOPEN;
1845            break;
1846
1847        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1848            effect = VIDEO_EFFECT_CURTAINCLOSE;
1849            break;
1850
1851        case M4xVSS_kVideoEffectType_BlackAndWhite:
1852            effect = VIDEO_EFFECT_BLACKANDWHITE;
1853            break;
1854
1855        case M4xVSS_kVideoEffectType_Pink:
1856            effect = VIDEO_EFFECT_PINK;
1857            break;
1858
1859        case M4xVSS_kVideoEffectType_Green:
1860            effect = VIDEO_EFFECT_GREEN;
1861            break;
1862
1863        case M4xVSS_kVideoEffectType_Sepia:
1864            effect = VIDEO_EFFECT_SEPIA;
1865            break;
1866
1867        case M4xVSS_kVideoEffectType_Negative:
1868            effect = VIDEO_EFFECT_NEGATIVE;
1869            break;
1870
1871        case M4xVSS_kVideoEffectType_Framing:
1872            effect = VIDEO_EFFECT_FRAMING;
1873            break;
1874
1875        case M4xVSS_kVideoEffectType_Fifties:
1876            effect = VIDEO_EFFECT_FIFTIES;
1877            break;
1878
1879        case M4xVSS_kVideoEffectType_ColorRGB16:
1880            effect = VIDEO_EFFECT_COLOR_RGB16;
1881            break;
1882
1883        case M4xVSS_kVideoEffectType_Gradient:
1884            effect = VIDEO_EFFECT_GRADIENT;
1885            break;
1886
1887        default:
1888            effect = VIDEO_EFFECT_NONE;
1889            break;
1890    }
1891
1892    if(enable == M4OSA_TRUE) {
1893        //If already set, then no need to set again
1894        if(!(mCurrentVideoEffect & effect)) {
1895            mCurrentVideoEffect |= effect;
1896            if(effect == VIDEO_EFFECT_FIFTIES) {
1897                mIsFiftiesEffectStarted = true;
1898            }
1899        }
1900    }
1901    else  {
1902        //Reset only if already set
1903        if(mCurrentVideoEffect & effect) {
1904            mCurrentVideoEffect &= ~effect;
1905        }
1906    }
1907}
1908
1909status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
1910    mVideoWidth = width;
1911    mVideoHeight = height;
1912    return OK;
1913}
1914
1915
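// Packages the decoded frame, the active effect settings, and the renderer's
// output buffer into a vePostProcessParams structure and delegates the effect
// application to applyEffectsAndRenderingMode(). Semi-planar YUV input is not
// supported.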
1916M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
1917    M4OSA_ERR err = M4NO_ERROR;
1918    vePostProcessParams postProcessParams;
1919    int32_t colorFormat = 0;
1920
1921
1922    if(!mIsVideoSourceJpg) {
1923        sp<MetaData> meta = mVideoSource->getFormat();
1924        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1925    }
1926    else {
1927        colorFormat = OMX_COLOR_FormatYUV420Planar;
1928    }
1929
1930    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
1931       (colorFormat == 0x7FA30C00)) {
1932          LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
1933          return M4ERR_UNSUPPORTED_MEDIA_TYPE;
1934    }
1935
1936    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
1937        + mVideoBuffer->range_offset();
1938
1939    postProcessParams.videoWidth = mVideoWidth;
1940    postProcessParams.videoHeight = mVideoHeight;
1941    postProcessParams.timeMs = mDecodedVideoTs/1000;
1942    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
1943    postProcessParams.effectsSettings = mEffectsSettings;
1944    postProcessParams.numberEffects = mNumberEffects;
1945    postProcessParams.outVideoWidth = mOutputVideoWidth;
1946    postProcessParams.outVideoHeight = mOutputVideoHeight;
1947    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
1948    postProcessParams.renderingMode = mRenderingMode;
1949    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
1950        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
1951        mIsFiftiesEffectStarted = M4OSA_FALSE;
1952    }
1953    else {
1954       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
1955    }
1956
1957    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
1958    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
1959    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
1960    err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
1961
1962    return err;
1963}
1964
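// Reads decoded buffers until the first frame at or after the seek time (or the
// clip's begin cut time) is found, discarding earlier and empty buffers. A
// format change reported by the decoder triggers renderer re-initialization.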
1965status_t PreviewPlayer::readFirstVideoFrame() {
1966    LOGV("PreviewPlayer::readFirstVideoFrame");
1967
1968    if (!mVideoBuffer) {
1969        MediaSource::ReadOptions options;
1970        if (mSeeking != NO_SEEK) {
1971            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
1972                    mSeekTimeUs / 1E6);
1973
1974            options.setSeekTo(
1975                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
1976        }
1977        for (;;) {
1978            status_t err = mVideoSource->read(&mVideoBuffer, &options);
1979            options.clearSeekTo();
1980
1981            if (err != OK) {
1982                CHECK_EQ(mVideoBuffer, NULL);
1983
1984                if (err == INFO_FORMAT_CHANGED) {
1985                    LOGV("LV PLAYER VideoSource signalled format change");
1986                    notifyVideoSize_l();
1987                    sp<MetaData> meta = mVideoSource->getFormat();
1988
1989                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
1990                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
1991
1992                    if (mVideoRenderer != NULL) {
1993                        mVideoRendererIsPreview = false;
1994                        err = initRenderer_l();
1995                        if (err != OK) {
1996                            postStreamDoneEvent_l(err);
1997                        }
1998                    }
1999                    continue;
2000                }
2001                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
2002                mFlags |= VIDEO_AT_EOS;
2003                mFlags |= AUDIO_AT_EOS;
2004                postStreamDoneEvent_l(err);
2005                return OK;
2006            }
2007
2008            if (mVideoBuffer->range_length() == 0) {
2009                // Some decoders, notably the PV AVC software decoder
2010                // return spurious empty buffers that we just want to ignore.
2011
2012                mVideoBuffer->release();
2013                mVideoBuffer = NULL;
2014                continue;
2015            }
2016
2017            int64_t videoTimeUs;
2018            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
2019            if (mSeeking != NO_SEEK) {
2020                if (videoTimeUs < mSeekTimeUs) {
2021                    // buffers are before seek time
2022                    // ignore them
2023                    mVideoBuffer->release();
2024                    mVideoBuffer = NULL;
2025                    continue;
2026                }
2027            } else {
2028                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
2029                    // buffers are before begin cut time
2030                    // ignore them
2031                    mVideoBuffer->release();
2032                    mVideoBuffer = NULL;
2033                    continue;
2034                }
2035            }
2036            break;
2037        }
2038    }
2039
2040    int64_t timeUs;
2041    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
2042
2043    {
2044        Mutex::Autolock autoLock(mMiscStateLock);
2045        mVideoTimeUs = timeUs;
2046    }
2047
2048    mDecodedVideoTs = timeUs;
2049
2050    return OK;
2051
2052}
2053
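// Returns the timestamp of the last rendered frame in milliseconds, expressed
// relative to the storyboard and adjusted for the clip's begin cut time.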
2054status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
2055    *lastRenderedTimeMs = (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec);
2056    return OK;
2057}
2058
2059}  // namespace android
2060