VideoEditorVideoEncoder.cpp revision 2aa01fd002bba1dde45791c1138c1f71a8d0aa53
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17*************************************************************************
18* @file   VideoEditorVideoEncoder.cpp
19* @brief  StageFright shell video encoder
20*************************************************************************
21*/
22#define LOG_NDEBUG 1
23#define LOG_TAG "VIDEOEDITOR_VIDEOENCODER"
24
25/*******************
26 *     HEADERS     *
27 *******************/
28#include "M4OSA_Debug.h"
29#include "M4SYS_AccessUnit.h"
30#include "VideoEditorVideoEncoder.h"
31#include "VideoEditorUtils.h"
32#include <I420ColorConverter.h>
33
34#include "utils/Log.h"
35#include "utils/Vector.h"
36#include <media/stagefright/MediaSource.h>
37#include <media/stagefright/MediaDebug.h>
38#include <media/stagefright/MediaDefs.h>
39#include <media/stagefright/MetaData.h>
40#include <media/stagefright/OMXClient.h>
41#include <media/stagefright/OMXCodec.h>
42#include "OMX_Video.h"
43
44/********************
45 *   DEFINITIONS    *
46 ********************/
47
48// Force using hardware encoder
49#define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly
50
51#if !defined(VIDEOEDITOR_FORCECODEC)
52    #error "Cannot force DSI retrieval if codec type is not fixed"
53#endif
54
55/********************
56 *   SOURCE CLASS   *
57 ********************/
58
59namespace android {
60
// MediaSource implementation acting as a push-driven FIFO between the
// encoder shell and the OMX encoder: the shell queues raw frames with
// storeBuffer() and the codec pulls them back out through read().
// storeBuffer(NULL) marks end of stream.
struct VideoEditorVideoEncoderSource : public MediaSource {
    public:
        static sp<VideoEditorVideoEncoderSource> Create(
            const sp<MetaData> &format);
        virtual status_t start(MetaData *params = NULL);
        virtual status_t stop();
        virtual sp<MetaData> getFormat();
        // Pops the oldest queued frame; blocks until a frame or EOS arrives.
        virtual status_t read(MediaBuffer **buffer,
            const ReadOptions *options = NULL);
        // Appends a frame (NULL signals EOS); returns the number of frames
        // currently queued.
        virtual int32_t storeBuffer(MediaBuffer *buffer);

    protected:
        virtual ~VideoEditorVideoEncoderSource();

    private:
        // Singly-linked list node owning one queued frame.
        struct MediaBufferChain {
            MediaBuffer* buffer;
            MediaBufferChain* nextLink;
        };
        enum State {
            CREATED,
            STARTED,
            ERROR
        };
        VideoEditorVideoEncoderSource(const sp<MetaData> &format);

        // Don't call me
        VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &);
        VideoEditorVideoEncoderSource &operator=(
                const VideoEditorVideoEncoderSource &);

        MediaBufferChain* mFirstBufferLink;  // head of the FIFO (oldest frame)
        MediaBufferChain* mLastBufferLink;   // tail of the FIFO (newest frame)
        int32_t           mNbBuffer;         // number of queued frames
        bool              mIsEOS;            // set once storeBuffer(NULL) seen
        State             mState;
        sp<MetaData>      mEncFormat;        // returned by getFormat()
        Mutex             mLock;             // guards all of the above
        Condition         mBufferCond;       // signaled on new frame or EOS
};
101
102sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create(
103    const sp<MetaData> &format) {
104
105    sp<VideoEditorVideoEncoderSource> aSource =
106        new VideoEditorVideoEncoderSource(format);
107    return aSource;
108}
109
// Initializes an empty FIFO in the CREATED state; frames are only consumed
// after start() moves the source to STARTED.
VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource(
    const sp<MetaData> &format):
        mFirstBufferLink(NULL),
        mLastBufferLink(NULL),
        mNbBuffer(0),
        mIsEOS(false),
        mState(CREATED),
        mEncFormat(format) {
    LOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource");
}
120
121VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() {
122
123    // Safety clean up
124    if( STARTED == mState ) {
125        stop();
126    }
127}
128
129status_t VideoEditorVideoEncoderSource::start(MetaData *meta) {
130    status_t err = OK;
131
132    LOGV("VideoEditorVideoEncoderSource::start() begin");
133
134    if( CREATED != mState ) {
135        LOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState);
136        return UNKNOWN_ERROR;
137    }
138    mState = STARTED;
139
140    LOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err);
141    return err;
142}
143
144status_t VideoEditorVideoEncoderSource::stop() {
145    status_t err = OK;
146
147    LOGV("VideoEditorVideoEncoderSource::stop() begin");
148
149    if( STARTED != mState ) {
150        LOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState);
151        return UNKNOWN_ERROR;
152    }
153
154    // Release the buffer chain
155    int32_t i = 0;
156    MediaBufferChain* tmpLink = NULL;
157    while( mFirstBufferLink ) {
158        i++;
159        tmpLink = mFirstBufferLink;
160        mFirstBufferLink = mFirstBufferLink->nextLink;
161        delete tmpLink;
162    }
163    LOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i);
164    mFirstBufferLink = NULL;
165    mLastBufferLink = NULL;
166
167    mState = CREATED;
168
169    LOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err);
170    return err;
171}
172
173sp<MetaData> VideoEditorVideoEncoderSource::getFormat() {
174
175    LOGV("VideoEditorVideoEncoderSource::getFormat");
176    return mEncFormat;
177}
178
// Consumer side of the FIFO: blocks until a frame is queued or EOS was
// signaled, then pops the oldest frame. Ownership of the returned
// MediaBuffer transfers to the caller. Returns ERROR_END_OF_STREAM once
// the queue is drained after storeBuffer(NULL).
status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer,
        const ReadOptions *options) {
    Mutex::Autolock autolock(mLock);
    MediaSource::ReadOptions readOptions;
    status_t err = OK;
    MediaBufferChain* tmpLink = NULL;

    LOGV("VideoEditorVideoEncoderSource::read() begin");

    if ( STARTED != mState ) {
        LOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState);
        return UNKNOWN_ERROR;
    }

    // Block until a producer queues a frame or flags end of stream.
    while (mFirstBufferLink == NULL && !mIsEOS) {
        mBufferCond.wait(mLock);
    }

    // End of stream?
    if (mFirstBufferLink == NULL) {
        *buffer = NULL;
        LOGV("VideoEditorVideoEncoderSource::read : EOS");
        return ERROR_END_OF_STREAM;
    }

    // Get a buffer from the chain: pop the head link
    *buffer = mFirstBufferLink->buffer;
    tmpLink = mFirstBufferLink;
    mFirstBufferLink = mFirstBufferLink->nextLink;

    // Popped the last element: the tail pointer must be cleared too.
    if ( NULL == mFirstBufferLink ) {
        mLastBufferLink = NULL;
    }
    delete tmpLink;
    mNbBuffer--;

    LOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err);
    return err;
}
218
219int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) {
220    Mutex::Autolock autolock(mLock);
221    status_t err = OK;
222
223    LOGV("VideoEditorVideoEncoderSource::storeBuffer() begin");
224
225    if( NULL == buffer ) {
226        LOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS");
227        mIsEOS = true;
228    } else {
229        MediaBufferChain* newLink = new MediaBufferChain;
230        newLink->buffer = buffer;
231        newLink->nextLink = NULL;
232        if( NULL != mLastBufferLink ) {
233            mLastBufferLink->nextLink = newLink;
234        } else {
235            mFirstBufferLink = newLink;
236        }
237        mLastBufferLink = newLink;
238        mNbBuffer++;
239    }
240    mBufferCond.signal();
241    LOGV("VideoEditorVideoEncoderSource::storeBuffer() end");
242    return mNbBuffer;
243}
244
245/********************
246 *      PULLER      *
247 ********************/
248
249// Pulls media buffers from a MediaSource repeatedly.
250// The user can then get the buffers from that list.
251class VideoEditorVideoEncoderPuller {
252public:
253    VideoEditorVideoEncoderPuller(sp<MediaSource> source);
254    ~VideoEditorVideoEncoderPuller();
255    void start();
256    void stop();
257    MediaBuffer* getBufferBlocking();
258    MediaBuffer* getBufferNonBlocking();
259    void putBuffer(MediaBuffer* buffer);
260private:
261    static int acquireThreadStart(void* arg);
262    void acquireThreadFunc();
263
264    static int releaseThreadStart(void* arg);
265    void releaseThreadFunc();
266
267    sp<MediaSource> mSource;
268    Vector<MediaBuffer*> mBuffers;
269    Vector<MediaBuffer*> mReleaseBuffers;
270
271    Mutex mLock;
272    Condition mUserCond;     // for the user of this class
273    Condition mAcquireCond;  // for the acquire thread
274    Condition mReleaseCond;  // for the release thread
275
276    bool mAskToStart;      // Asks the threads to start
277    bool mAskToStop;       // Asks the threads to stop
278    bool mAcquireStopped;  // The acquire thread has stopped
279    bool mReleaseStopped;  // The release thread has stopped
280};
281
282VideoEditorVideoEncoderPuller::VideoEditorVideoEncoderPuller(
283    sp<MediaSource> source) {
284    mSource = source;
285    mAskToStart = false;
286    mAskToStop = false;
287    mAcquireStopped = false;
288    mReleaseStopped = false;
289    androidCreateThread(acquireThreadStart, this);
290    androidCreateThread(releaseThreadStart, this);
291}
292
// Blocks until both worker threads have terminated (see stop()).
VideoEditorVideoEncoderPuller::~VideoEditorVideoEncoderPuller() {
    stop();
}
296
297void VideoEditorVideoEncoderPuller::start() {
298    Mutex::Autolock autolock(mLock);
299    mAskToStart = true;
300    mAcquireCond.signal();
301    mReleaseCond.signal();
302}
303
// Asks both worker threads to stop, waits until they have both flagged
// termination, then releases any buffers still held on either list.
// Safe to call twice (the destructor calls it after an explicit stop()).
void VideoEditorVideoEncoderPuller::stop() {
    Mutex::Autolock autolock(mLock);
    mAskToStop = true;
    mAcquireCond.signal();
    mReleaseCond.signal();
    // Each worker signals mUserCond once when it terminates.
    while (!mAcquireStopped || !mReleaseStopped) {
        mUserCond.wait(mLock);
    }

    // Release remaining buffers
    for (size_t i = 0; i < mBuffers.size(); i++) {
        mBuffers.itemAt(i)->release();
    }

    for (size_t i = 0; i < mReleaseBuffers.size(); i++) {
        mReleaseBuffers.itemAt(i)->release();
    }

    mBuffers.clear();
    mReleaseBuffers.clear();
}
325
326MediaBuffer* VideoEditorVideoEncoderPuller::getBufferNonBlocking() {
327    Mutex::Autolock autolock(mLock);
328    if (mBuffers.empty()) {
329        return NULL;
330    } else {
331        MediaBuffer* b = mBuffers.itemAt(0);
332        mBuffers.removeAt(0);
333        return b;
334    }
335}
336
337MediaBuffer* VideoEditorVideoEncoderPuller::getBufferBlocking() {
338    Mutex::Autolock autolock(mLock);
339    while (mBuffers.empty() && !mAcquireStopped) {
340        mUserCond.wait(mLock);
341    }
342
343    if (mBuffers.empty()) {
344        return NULL;
345    } else {
346        MediaBuffer* b = mBuffers.itemAt(0);
347        mBuffers.removeAt(0);
348        return b;
349    }
350}
351
// Queues a consumed buffer for the release thread, which performs the
// potentially blocking MediaBuffer::release() on the caller's behalf.
void VideoEditorVideoEncoderPuller::putBuffer(MediaBuffer* buffer) {
    Mutex::Autolock autolock(mLock);
    mReleaseBuffers.push(buffer);
    mReleaseCond.signal();
}
357
358int VideoEditorVideoEncoderPuller::acquireThreadStart(void* arg) {
359    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
360    self->acquireThreadFunc();
361    return 0;
362}
363
364int VideoEditorVideoEncoderPuller::releaseThreadStart(void* arg) {
365    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
366    self->releaseThreadFunc();
367    return 0;
368}
369
// Acquire worker loop: repeatedly reads buffers from the source and queues
// them for the user. The lock is dropped around the (potentially long)
// blocking MediaSource::read() call. The loop exits when asked to stop or
// when the source returns a non-OK status (error or end of stream).
void VideoEditorVideoEncoderPuller::acquireThreadFunc() {
    mLock.lock();

    // Wait for the start signal
    while (!mAskToStart && !mAskToStop) {
        mAcquireCond.wait(mLock);
    }

    // Loop until we are asked to stop, or there is nothing more to read
    while (!mAskToStop) {
        MediaBuffer* pBuffer;
        mLock.unlock();  // never hold the lock across the blocking read
        status_t result = mSource->read(&pBuffer, NULL);
        mLock.lock();
        if (result != OK) {
            break;  // error or EOS: no more buffers will arrive
        }
        mBuffers.push(pBuffer);
        mUserCond.signal();  // wake getBufferBlocking()
    }

    mAcquireStopped = true;
    mUserCond.signal();  // wake stop()/getBufferBlocking() waiting on us
    mLock.unlock();
}
395
// Release worker loop: releases buffers handed over through putBuffer().
// The lock is dropped around MediaBuffer::release() so the user is never
// blocked by it. Exits only when asked to stop AND the pending list is
// fully drained.
void VideoEditorVideoEncoderPuller::releaseThreadFunc() {
    mLock.lock();

    // Wait for the start signal
    while (!mAskToStart && !mAskToStop) {
        mReleaseCond.wait(mLock);
    }

    // Loop until we are asked to stop
    while (1) {
        if (mReleaseBuffers.empty()) {
            if (mAskToStop) {
                break;  // drained and asked to stop: terminate
            } else {
                mReleaseCond.wait(mLock);
                continue;
            }
        }
        MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0);
        mReleaseBuffers.removeAt(0);
        mLock.unlock();  // never hold the lock across release()
        pBuffer->release();
        mLock.lock();
    }

    mReleaseStopped = true;
    mUserCond.signal();  // wake stop() waiting for this thread
    mLock.unlock();
}
425
426/**
427 ******************************************************************************
428 * structure VideoEditorVideoEncoder_Context
429 * @brief    This structure defines the context of the StageFright video encoder
430 *           shell
431 ******************************************************************************
432*/
// Life-cycle states of the encoder shell context. Values are distinct bit
// flags (0x1..0x10).
typedef enum {
    CREATED   = 0x1,   // context allocated by _init (or closed again)
    OPENED    = 0x2,   // codec graph built by _open
    STARTED   = 0x4,
    BUFFERING = 0x8,
    READING   = 0x10
} VideoEditorVideoEncoder_State;
440
typedef struct {
    VideoEditorVideoEncoder_State     mState;        // current life-cycle state
    M4ENCODER_Format                  mFormat;       // H263 / MPEG4 / H264
    M4WRITER_DataInterface*           mWriterDataInterface; // writer callbacks
    M4VPP_apply_fct*                  mPreProcFunction; // frame pre-processing hook
    M4VPP_Context                     mPreProcContext;  // context for the hook
    M4SYS_AccessUnit*                 mAccessUnit;   // AU exchanged with the writer
    M4ENCODER_Params*                 mCodecParams;  // copy of the open() params
    M4ENCODER_Header                  mHeader;       // codec specific data (DSI)
    // H264 NALU post-processing callback/context (H264MCS); set by callers
    // outside this chunk — confirm usage before relying on it.
    H264MCS_ProcessEncodedNALU_fct*   mH264NALUPostProcessFct;
    M4OSA_Context                     mH264NALUPostProcessCtx;
    M4OSA_UInt32                      mLastCTS;      // last emitted timestamp
    sp<VideoEditorVideoEncoderSource> mEncoderSource; // raw-frame FIFO feeding the codec
    OMXClient                         mClient;       // OMX service connection
    sp<MediaSource>                   mEncoder;      // the OMX encoder instance
    OMX_COLOR_FORMATTYPE              mEncoderColorFormat; // encoder input color format
    VideoEditorVideoEncoderPuller*    mPuller;       // pulls encoded output buffers
    I420ColorConverter*               mI420ColorConverter; // NULL if encoder accepts I420

    // Encoding statistics (input/output frame counts and timestamp bounds).
    uint32_t                          mNbInputFrames;
    double                            mFirstInputCts;
    double                            mLastInputCts;
    uint32_t                          mNbOutputFrames;
    int64_t                           mFirstOutputCts;
    int64_t                           mLastOutputCts;

} VideoEditorVideoEncoder_Context;
468
469/********************
470 *      TOOLS       *
471 ********************/
472
473M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
474        sp<MetaData> metaData) {
475    M4OSA_ERR err = M4NO_ERROR;
476    VideoEditorVideoEncoder_Context*  pEncoderContext = M4OSA_NULL;
477    status_t result = OK;
478    int32_t nbBuffer = 0;
479    int32_t stride = 0;
480    int32_t height = 0;
481    int32_t framerate = 0;
482    int32_t isCodecConfig = 0;
483    size_t size = 0;
484    uint32_t codecFlags = 0;
485    MediaBuffer* inputBuffer = NULL;
486    MediaBuffer* outputBuffer = NULL;
487    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
488    sp<MediaSource> encoder = NULL;;
489    OMXClient client;
490
491    LOGV("VideoEditorVideoEncoder_getDSI begin");
492    // Input parameters check
493    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
494    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);
495
496    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
497    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
498
499    // Create the encoder source
500    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
501    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);
502
503    // Connect to the OMX client
504    result = client.connect();
505    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
506
507    // Create the OMX codec
508    // VIDEOEDITOR_FORCECODEC MUST be defined here
509    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
510    encoder = OMXCodec::Create(client.interface(), metaData, true,
511        encoderSource, NULL, codecFlags);
512    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);
513
514    /**
515     * Send fake frames and retrieve the DSI
516     */
517    // Send a fake frame to the source
518    metaData->findInt32(kKeyStride,     &stride);
519    metaData->findInt32(kKeyHeight,     &height);
520    metaData->findInt32(kKeySampleRate, &framerate);
521    size = (size_t)(stride*height*3)/2;
522    inputBuffer = new MediaBuffer(size);
523    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
524    nbBuffer = encoderSource->storeBuffer(inputBuffer);
525    encoderSource->storeBuffer(NULL); // Signal EOS
526
527    // Call read once to get the DSI
528    result = encoder->start();;
529    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
530    result = encoder->read(&outputBuffer, NULL);
531    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
532    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
533        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);
534
535    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
536    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
537        // For H264, format the DSI
538        result = buildAVCCodecSpecificData(
539            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
540            (size_t*)(&(pEncoderContext->mHeader.Size)),
541            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
542            outputBuffer->range_length(), encoder->getFormat().get());
543        outputBuffer->release();
544        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
545    } else {
546        // For MPEG4, just copy the DSI
547        pEncoderContext->mHeader.Size =
548            (M4OSA_UInt32)outputBuffer->range_length();
549        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
550            pEncoderContext->mHeader.Size, "Encoder header");
551        memcpy((void *)pEncoderContext->mHeader.pBuf,
552            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
553            pEncoderContext->mHeader.Size);
554        outputBuffer->release();
555    }
556
557    result = encoder->stop();
558    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
559
560cleanUp:
561    // Destroy the graph
562    if ( encoder != NULL ) { encoder.clear(); }
563    client.disconnect();
564    if ( encoderSource != NULL ) { encoderSource.clear(); }
565    if ( M4NO_ERROR == err ) {
566        LOGV("VideoEditorVideoEncoder_getDSI no error");
567    } else {
568        LOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
569    }
570    LOGV("VideoEditorVideoEncoder_getDSI end");
571    return err;
572}
573/********************
574 * ENGINE INTERFACE *
575 ********************/
576
577M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) {
578    M4OSA_ERR err = M4NO_ERROR;
579    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
580
581    LOGV("VideoEditorVideoEncoder_cleanup begin");
582    // Input parameters check
583    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
584
585    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
586    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
587
588    // Release memory
589    SAFE_FREE(pEncoderContext->mHeader.pBuf);
590    SAFE_FREE(pEncoderContext);
591    pContext = M4OSA_NULL;
592
593cleanUp:
594    if ( M4NO_ERROR == err ) {
595        LOGV("VideoEditorVideoEncoder_cleanup no error");
596    } else {
597        LOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err);
598    }
599    LOGV("VideoEditorVideoEncoder_cleanup end");
600    return err;
601}
602
// Allocates and initializes the encoder shell context for the given output
// format and probes which input color format the platform encoder expects.
// On success *pContext receives the new context (state CREATED); on failure
// *pContext is reset to M4OSA_NULL. pExternalAPI and pUserData are accepted
// for interface compatibility but not used here.
// NOTE(review): VideoEditorVideoEncoder_cleanup() does not free
// mI420ColorConverter (close() does) — confirm cleanup is only legal after
// close(), otherwise the converter leaks.
M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format,
        M4ENCODER_Context* pContext,
        M4WRITER_DataInterface* pWriterDataInterface,
        M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt,
        M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) {

    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    int encoderInput = OMX_COLOR_FormatYUV420Planar;

    LOGV("VideoEditorVideoEncoder_init begin: format  %d", format);
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER);

    // Context allocation & initialization
    SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1,
        "VideoEditorVideoEncoder");
    pEncoderContext->mState = CREATED;
    pEncoderContext->mFormat = format;
    pEncoderContext->mWriterDataInterface = pWriterDataInterface;
    pEncoderContext->mPreProcFunction = pVPPfct;
    pEncoderContext->mPreProcContext = pVPPctxt;
    pEncoderContext->mPuller = NULL;

    // Get color converter and determine encoder input format; the converter
    // is only kept when the encoder needs something other than plain I420.
    pEncoderContext->mI420ColorConverter = new I420ColorConverter;
    if (pEncoderContext->mI420ColorConverter->isLoaded()) {
        encoderInput = pEncoderContext->mI420ColorConverter->getEncoderInputFormat();
    }
    if (encoderInput == OMX_COLOR_FormatYUV420Planar) {
        // Encoder consumes I420 directly: no conversion needed.
        delete pEncoderContext->mI420ColorConverter;
        pEncoderContext->mI420ColorConverter = NULL;
    }
    pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput;
    LOGI("encoder input format = 0x%X\n", encoderInput);

    *pContext = pEncoderContext;

cleanUp:
    if ( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_init no error");
    } else {
        // Roll back the partial context on failure.
        VideoEditorVideoEncoder_cleanup(pEncoderContext);
        *pContext = M4OSA_NULL;
        LOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_init end");
    return err;
}
655
656M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext,
657        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
658        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
659        {
660
661    return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext,
662        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
663}
664
665
666M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext,
667        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
668        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
669        {
670
671    return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext,
672        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
673}
674
675
676M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext,
677        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
678        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
679        {
680
681    return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext,
682        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
683}
684
// Tears down the StageFright graph built by VideoEditorVideoEncoder_open()
// (encoder, OMX connection, source, puller, color converter) and returns
// the context to the CREATED state. _cleanup() frees the context itself.
M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;

    LOGV("VideoEditorVideoEncoder_close begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);

    // Release memory
    SAFE_FREE(pEncoderContext->mCodecParams);

    // Destroy the graph (encoder before its client connection and source)
    pEncoderContext->mEncoder.clear();
    pEncoderContext->mClient.disconnect();
    pEncoderContext->mEncoderSource.clear();

    delete pEncoderContext->mPuller;
    pEncoderContext->mPuller = NULL;

    delete pEncoderContext->mI420ColorConverter;
    pEncoderContext->mI420ColorConverter = NULL;

    // Set the new state
    pEncoderContext->mState = CREATED;

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_close no error");
    } else {
        LOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_close end");
    return err;
}
722
723
// Configures the encoding session and builds the StageFright graph:
// validates and copies the codec parameters, builds the encoder MetaData,
// retrieves the DSI, then creates the frame source, the OMX encoder and the
// output puller. Moves the context from CREATED to OPENED; on failure
// VideoEditorVideoEncoder_close() rolls the context back.
// @param pContext encoder shell context (from _init, state CREATED)
// @param pAU      access unit used to exchange encoded data with the writer
// @param pParams  M4ENCODER_Params describing the session
M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext,
        M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4ENCODER_Params* pCodecParams = M4OSA_NULL;
    status_t result = OK;
    sp<MetaData> encoderMetadata = NULL;
    const char* mime = NULL;
    int32_t iProfile = 0;
    int32_t iFrameRate = 0;
    uint32_t codecFlags = 0;

    LOGV(">>> VideoEditorVideoEncoder_open begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pAU,      M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pParams,  M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    pCodecParams = (M4ENCODER_Params*)pParams;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Context initialization
    pEncoderContext->mAccessUnit = pAU;

    // Allocate & initialize the encoding parameters (context keeps its own
    // copy; the caller's struct is not retained)
    SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1,
        "VideoEditorVideoEncoder");


    pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat;
    pEncoderContext->mCodecParams->InputFrameWidth =
        pCodecParams->InputFrameWidth;
    pEncoderContext->mCodecParams->InputFrameHeight =
        pCodecParams->InputFrameHeight;
    pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth;
    pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight;
    pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate;
    pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate;
    pEncoderContext->mCodecParams->Format = pCodecParams->Format;

    // Check output format consistency and resolution (dimensions must be
    // multiples of 16)
    VIDEOEDITOR_CHECK(
        pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat,
        M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth  % 16,
        M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16,
        M4ERR_PARAMETER);

    /**
     * StageFright graph building
     */

    // Create the meta data for the encoder: mime type and profile depend on
    // the requested output format
    encoderMetadata = new MetaData;
    switch( pEncoderContext->mCodecParams->Format ) {
        case M4ENCODER_kH263:
            mime     = MEDIA_MIMETYPE_VIDEO_H263;
            iProfile = OMX_VIDEO_H263ProfileBaseline;
            break;
        case M4ENCODER_kMPEG4:
            mime     = MEDIA_MIMETYPE_VIDEO_MPEG4;
            iProfile = OMX_VIDEO_MPEG4ProfileSimple;
            break;
        case M4ENCODER_kH264:
            mime     = MEDIA_MIMETYPE_VIDEO_AVC;
            iProfile = OMX_VIDEO_AVCProfileBaseline;
            break;
        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format",
                M4ERR_PARAMETER);
            break;
    }
    encoderMetadata->setCString(kKeyMIMEType, mime);
    encoderMetadata->setInt32(kKeyVideoProfile, iProfile);
    // Stride/slice-height equal width/height: the input frames carry no
    // padding
    encoderMetadata->setInt32(kKeyWidth,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyStride,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);
    encoderMetadata->setInt32(kKeySliceHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);

    // Map the M4ENCODER frame-rate enum to an integer fps value; fractional
    // rates are rounded up (7.5 -> 8, 12.5 -> 13)
    switch( pEncoderContext->mCodecParams->FrameRate ) {
        case M4ENCODER_k5_FPS:    iFrameRate = 5;  break;
        case M4ENCODER_k7_5_FPS:  iFrameRate = 8;  break;
        case M4ENCODER_k10_FPS:   iFrameRate = 10; break;
        case M4ENCODER_k12_5_FPS: iFrameRate = 13; break;
        case M4ENCODER_k15_FPS:   iFrameRate = 15; break;
        case M4ENCODER_k20_FPS:   iFrameRate = 20; break;
        case M4ENCODER_k25_FPS:   iFrameRate = 25; break;
        case M4ENCODER_k30_FPS:   iFrameRate = 30; break;
        case M4ENCODER_kVARIABLE_FPS:
            iFrameRate = 30;
            LOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30");
          break;
        case M4ENCODER_kUSE_TIMESCALE:
            iFrameRate = 30;
            LOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE:  set to 30");
            break;

        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate",
                M4ERR_STATE);
            break;
    }
    encoderMetadata->setInt32(kKeyFrameRate, iFrameRate);
    encoderMetadata->setInt32(kKeyBitRate,
        (int32_t)pEncoderContext->mCodecParams->Bitrate);
    encoderMetadata->setInt32(kKeyIFramesInterval, 1);

    encoderMetadata->setInt32(kKeyColorFormat,
        pEncoderContext->mEncoderColorFormat);

    // Get the encoder DSI (runs a throw-away encode, see _getDSI)
    err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata);
    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

    // Create the encoder source
    pEncoderContext->mEncoderSource = VideoEditorVideoEncoderSource::Create(
        encoderMetadata);
    VIDEOEDITOR_CHECK(
        NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = pEncoderContext->mClient.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
#ifdef VIDEOEDITOR_FORCECODEC
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
#endif /* VIDEOEDITOR_FORCECODEC */
    pEncoderContext->mEncoder = OMXCodec::Create(
        pEncoderContext->mClient.interface(), encoderMetadata, true,
        pEncoderContext->mEncoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE);
    LOGV("VideoEditorVideoEncoder_open : DONE");
    pEncoderContext->mPuller = new VideoEditorVideoEncoderPuller(
        pEncoderContext->mEncoder);

    // Set the new state
    pEncoderContext->mState = OPENED;

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_open no error");
    } else {
        // Roll back whatever part of the graph was already built.
        VideoEditorVideoEncoder_close(pEncoderContext);
        LOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_open end");
    return err;
}
879
880M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer(
881        M4ENCODER_Context pContext, M4OSA_Double Cts,
882        M4OSA_Bool bReachedEOS) {
883    M4OSA_ERR err = M4NO_ERROR;
884    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
885    M4VIFI_ImagePlane pOutPlane[3];
886    MediaBuffer* buffer = NULL;
887    int32_t nbBuffer = 0;
888
889    LOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts  %f", Cts);
890    // Input parameters check
891    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
892
893    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
894    pOutPlane[0].pac_data = M4OSA_NULL;
895    pOutPlane[1].pac_data = M4OSA_NULL;
896    pOutPlane[2].pac_data = M4OSA_NULL;
897
898    if ( M4OSA_FALSE == bReachedEOS ) {
899        M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth *
900            pEncoderContext->mCodecParams->FrameHeight;
901        M4OSA_UInt32 sizeU = sizeY >> 2;
902        M4OSA_UInt32 size  = sizeY + 2*sizeU;
903        M4OSA_UInt8* pData = M4OSA_NULL;
904        buffer = new MediaBuffer((size_t)size);
905        pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset();
906
907        // Prepare the output image for pre-processing
908        pOutPlane[0].u_width   = pEncoderContext->mCodecParams->FrameWidth;
909        pOutPlane[0].u_height  = pEncoderContext->mCodecParams->FrameHeight;
910        pOutPlane[0].u_topleft = 0;
911        pOutPlane[0].u_stride  = pOutPlane[0].u_width;
912        pOutPlane[1].u_width   = pOutPlane[0].u_width/2;
913        pOutPlane[1].u_height  = pOutPlane[0].u_height/2;
914        pOutPlane[1].u_topleft = 0;
915        pOutPlane[1].u_stride  = pOutPlane[0].u_stride/2;
916        pOutPlane[2].u_width   = pOutPlane[1].u_width;
917        pOutPlane[2].u_height  = pOutPlane[1].u_height;
918        pOutPlane[2].u_topleft = 0;
919        pOutPlane[2].u_stride  = pOutPlane[1].u_stride;
920
921        pOutPlane[0].pac_data = pData;
922        pOutPlane[1].pac_data = pData + sizeY;
923        pOutPlane[2].pac_data = pData + sizeY + sizeU;
924
925        // Apply pre-processing
926        err = pEncoderContext->mPreProcFunction(
927            pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane);
928        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
929
930        // Convert MediaBuffer to the encoder input format if necessary
931        if (pEncoderContext->mI420ColorConverter) {
932            I420ColorConverter* converter = pEncoderContext->mI420ColorConverter;
933            int actualWidth = pEncoderContext->mCodecParams->FrameWidth;
934            int actualHeight = pEncoderContext->mCodecParams->FrameHeight;
935
936            int encoderWidth, encoderHeight;
937            ARect encoderRect;
938            int encoderBufferSize;
939
940            if (converter->getEncoderInputBufferInfo(
941                actualWidth, actualHeight,
942                &encoderWidth, &encoderHeight,
943                &encoderRect, &encoderBufferSize) == 0) {
944
945                MediaBuffer* newBuffer = new MediaBuffer(encoderBufferSize);
946
947                if (converter->convertI420ToEncoderInput(
948                    pData,  // srcBits
949                    actualWidth, actualHeight,
950                    encoderWidth, encoderHeight,
951                    encoderRect,
952                    (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) {
953                    LOGE("convertI420ToEncoderInput failed");
954                }
955
956                // switch to new buffer
957                buffer->release();
958                buffer = newBuffer;
959            }
960        }
961
962        // Set the metadata
963        buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000));
964    }
965
966    // Push the buffer to the source, a NULL buffer, notifies the source of EOS
967    nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer);
968
969cleanUp:
970    if ( M4NO_ERROR == err ) {
971        LOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err);
972    } else {
973        if( NULL != buffer ) {
974            buffer->release();
975        }
976        LOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err);
977    }
978    LOGV("VideoEditorVideoEncoder_processInputBuffer end");
979    return err;
980}
981
/**
 * Forward one encoded buffer from the encoder to the 3GP writer.
 *
 * Codec-config buffers (DSI) are only logged. For regular AUs the function:
 * checks that the timestamp is monotonic, obtains an AU container from the
 * writer, strips the 4-byte H264 Annex-B start code if present, optionally
 * runs the registered NALU post-processing hook (H264 trimming), copies the
 * payload, tags sync frames as AU_RAP, and commits the AU to the writer.
 *
 * @param pContext (IN) Encoder context (VideoEditorVideoEncoder_Context*)
 * @param buffer   (IN) Encoded MediaBuffer produced by the OMX encoder
 * @return M4NO_ERROR on success (or when the buffer is empty / dropped),
 *         M4ERR_PARAMETER on bad arguments, M4ERR_STATE on missing
 *         timestamp, or the writer/post-processing error
 */
M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer(
        M4ENCODER_Context pContext, MediaBuffer* buffer) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4OSA_UInt32 Cts = 0;
    int32_t i32Tmp = 0;
    int64_t i64Tmp = 0;
    status_t result = OK;

    LOGV("VideoEditorVideoEncoder_processOutputBuffer begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != buffer,   M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;

    // Process the returned AU
    if ( 0 == buffer->range_length() ) {
        // Encoder has no data yet, nothing unusual
        LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty");
        goto cleanUp;
    }
    // NOTE(review): pointer is truncated to 32 bits for this alignment
    // check; fine on a 32-bit target, would need uintptr_t on 64-bit.
    VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER);
    if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){
        {   // Display the DSI
            LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",
                buffer->range_length());
            uint8_t* tmp = (uint8_t*)(buffer->data());
            for( uint32_t i=0; i<buffer->range_length(); i++ ) {
                LOGV("DSI [%d] %.2X", i, tmp[i]);
            }
        }
    } else {
        // Check the CTS
        VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp),
            M4ERR_STATE);

        // Track first/last output timestamps for the end-of-encode stats
        pEncoderContext->mNbOutputFrames++;
        if ( 0 > pEncoderContext->mFirstOutputCts ) {
            pEncoderContext->mFirstOutputCts = i64Tmp;
        }
        pEncoderContext->mLastOutputCts = i64Tmp;

        // Encoder timestamps are in us; the writer works in ms
        Cts = (M4OSA_Int32)(i64Tmp/1000);
        LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)",
            pEncoderContext->mNbOutputFrames, i64Tmp, Cts,
            pEncoderContext->mLastCTS);
        if ( Cts < pEncoderContext->mLastCTS ) {
            // Non-monotonic timestamp: drop the AU rather than corrupt the
            // output stream (not treated as an error)
            LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going "
            "backwards %d < %d", Cts, pEncoderContext->mLastCTS);
            goto cleanUp;
        }
        LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d",
            Cts, pEncoderContext->mLastCTS);

        // Retrieve the AU container
        err = pEncoderContext->mWriterDataInterface->pStartAU(
            pEncoderContext->mWriterDataInterface->pWriterContext,
            pEncoderContext->mAccessUnit->stream->streamID,
            pEncoderContext->mAccessUnit);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

        // Format the AU
        VIDEOEDITOR_CHECK(
            buffer->range_length() <= pEncoderContext->mAccessUnit->size,
            M4ERR_PARAMETER);
        // Remove H264 AU start code
        if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
            if (!memcmp((const uint8_t *)buffer->data() + \
                    buffer->range_offset(), "\x00\x00\x00\x01", 4) ) {
                buffer->set_range(buffer->range_offset() + 4,
                    buffer->range_length() - 4);
            }
        }

        if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) &&
            (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) {
        // H264 trimming case, NALU post processing is needed
        M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size;
        err = pEncoderContext->mH264NALUPostProcessFct(
            pEncoderContext->mH264NALUPostProcessCtx,
            (M4OSA_UInt8*)buffer->data()+buffer->range_offset(),
            buffer->range_length(),
            (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress,
            &outputSize);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
        pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize;
        } else {
            // The AU can just be copied
            memcpy((void *)pEncoderContext->mAccessUnit->\
                dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\
                range_offset()), buffer->range_length());
            pEncoderContext->mAccessUnit->size =
                (M4OSA_UInt32)buffer->range_length();
        }

        // Sync (key) frames become random access points in the output file
        if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){
            pEncoderContext->mAccessUnit->attribute = AU_RAP;
        } else {
            pEncoderContext->mAccessUnit->attribute = AU_P_Frame;
        }
        pEncoderContext->mLastCTS = Cts;
        pEncoderContext->mAccessUnit->CTS = Cts;
        pEncoderContext->mAccessUnit->DTS = Cts;

        LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d",
            pEncoderContext->mAccessUnit->dataAddress,
            *pEncoderContext->mAccessUnit->dataAddress,
            pEncoderContext->mAccessUnit->size,
            pEncoderContext->mAccessUnit->CTS);

        // Write the AU
        err = pEncoderContext->mWriterDataInterface->pProcessAU(
            pEncoderContext->mWriterDataInterface->pWriterContext,
            pEncoderContext->mAccessUnit->stream->streamID,
            pEncoderContext->mAccessUnit);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
    }

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_processOutputBuffer no error");
    } else {
        // Drop the cached DSI header so a later retry can rebuild it
        SAFE_FREE(pEncoderContext->mHeader.pBuf);
        pEncoderContext->mHeader.Size = 0;
        LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_processOutputBuffer end");
    return err;
}
1113
1114M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext,
1115        M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts,
1116        M4ENCODER_FrameMode FrameMode) {
1117    M4OSA_ERR err = M4NO_ERROR;
1118    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1119    status_t result = OK;
1120    MediaBuffer* outputBuffer = NULL;
1121
1122    LOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode);
1123    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1124
1125    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1126    if ( STARTED == pEncoderContext->mState ) {
1127        pEncoderContext->mState = BUFFERING;
1128    }
1129    VIDEOEDITOR_CHECK(
1130        (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE);
1131
1132    pEncoderContext->mNbInputFrames++;
1133    if ( 0 > pEncoderContext->mFirstInputCts ) {
1134        pEncoderContext->mFirstInputCts = Cts;
1135    }
1136    pEncoderContext->mLastInputCts = Cts;
1137
1138    LOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode,
1139        Cts, pEncoderContext->mLastCTS);
1140
1141    // Push the input buffer to the encoder source
1142    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts,
1143        M4OSA_FALSE);
1144    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1145
1146    // Notify the source in case of EOS
1147    if ( M4ENCODER_kLastFrame == FrameMode ) {
1148        err = VideoEditorVideoEncoder_processInputBuffer(
1149            pEncoderContext, 0, M4OSA_TRUE);
1150        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1151    }
1152
1153    if ( BUFFERING == pEncoderContext->mState ) {
1154        // Prefetch is complete, start reading
1155        pEncoderContext->mState = READING;
1156    }
1157    // Read
1158    while (1)  {
1159        MediaBuffer *outputBuffer =
1160                pEncoderContext->mPuller->getBufferNonBlocking();
1161
1162        if (outputBuffer == NULL) break;
1163
1164        // Provide the encoded AU to the writer
1165        err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext,
1166            outputBuffer);
1167        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1168
1169        pEncoderContext->mPuller->putBuffer(outputBuffer);
1170    }
1171
1172cleanUp:
1173    if( M4NO_ERROR == err ) {
1174        LOGV("VideoEditorVideoEncoder_encode no error");
1175    } else {
1176        LOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err);
1177    }
1178    LOGV("VideoEditorVideoEncoder_encode end");
1179    return err;
1180}
1181
1182M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) {
1183    M4OSA_ERR                  err             = M4NO_ERROR;
1184    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1185    status_t                   result          = OK;
1186
1187    LOGV("VideoEditorVideoEncoder_start begin");
1188    // Input parameters check
1189    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1190
1191    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1192    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);
1193
1194    pEncoderContext->mNbInputFrames  = 0;
1195    pEncoderContext->mFirstInputCts  = -1.0;
1196    pEncoderContext->mLastInputCts   = -1.0;
1197    pEncoderContext->mNbOutputFrames = 0;
1198    pEncoderContext->mFirstOutputCts = -1;
1199    pEncoderContext->mLastOutputCts  = -1;
1200
1201    result = pEncoderContext->mEncoder->start();
1202    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
1203
1204    pEncoderContext->mPuller->start();
1205
1206    // Set the new state
1207    pEncoderContext->mState = STARTED;
1208
1209cleanUp:
1210    if ( M4NO_ERROR == err ) {
1211        LOGV("VideoEditorVideoEncoder_start no error");
1212    } else {
1213        LOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err);
1214    }
1215    LOGV("VideoEditorVideoEncoder_start end");
1216    return err;
1217}
1218
1219M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) {
1220    M4OSA_ERR err = M4NO_ERROR;
1221    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1222    MediaBuffer* outputBuffer = NULL;
1223    status_t result = OK;
1224
1225    LOGV("VideoEditorVideoEncoder_stop begin");
1226    // Input parameters check
1227    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1228    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1229
1230    // Send EOS again to make sure the source doesn't block.
1231    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0,
1232        M4OSA_TRUE);
1233    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1234
1235    // Process the remaining buffers if necessary
1236    if ( (BUFFERING | READING) & pEncoderContext->mState ) {
1237        while (1)  {
1238            MediaBuffer *outputBuffer =
1239                pEncoderContext->mPuller->getBufferBlocking();
1240
1241            if (outputBuffer == NULL) break;
1242
1243            err = VideoEditorVideoEncoder_processOutputBuffer(
1244                pEncoderContext, outputBuffer);
1245            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1246
1247            pEncoderContext->mPuller->putBuffer(outputBuffer);
1248        }
1249
1250        pEncoderContext->mState = STARTED;
1251    }
1252
1253    // Stop the graph module if necessary
1254    if ( STARTED == pEncoderContext->mState ) {
1255        pEncoderContext->mPuller->stop();
1256        pEncoderContext->mEncoder->stop();
1257        pEncoderContext->mState = OPENED;
1258    }
1259
1260    if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) {
1261        LOGW("Some frames were not encoded: input(%d) != output(%d)",
1262            pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
1263    }
1264
1265cleanUp:
1266    if ( M4NO_ERROR == err ) {
1267        LOGV("VideoEditorVideoEncoder_stop no error");
1268    } else {
1269        LOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err);
1270    }
1271    LOGV("VideoEditorVideoEncoder_stop end");
1272    return err;
1273}
1274
1275M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) {
1276    LOGW("regulBitRate is not implemented");
1277    return M4NO_ERROR;
1278}
1279
1280M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext,
1281        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
1282    M4OSA_ERR err = M4NO_ERROR;
1283    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1284
1285    LOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID);
1286    // Input parameters check
1287    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1288
1289    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1290
1291    switch( optionID ) {
1292        case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr:
1293            pEncoderContext->mH264NALUPostProcessFct =
1294                (H264MCS_ProcessEncodedNALU_fct*)optionValue;
1295            break;
1296        case M4ENCODER_kOptionID_H264ProcessNALUContext:
1297            pEncoderContext->mH264NALUPostProcessCtx =
1298                (M4OSA_Context)optionValue;
1299            break;
1300        default:
1301            LOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X",
1302                optionID);
1303            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
1304            break;
1305    }
1306
1307cleanUp:
1308    if ( M4NO_ERROR == err ) {
1309        LOGV("VideoEditorVideoEncoder_setOption no error");
1310    } else {
1311        LOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err);
1312    }
1313    LOGV("VideoEditorVideoEncoder_setOption end");
1314    return err;
1315}
1316
1317M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext,
1318        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
1319    M4OSA_ERR err = M4NO_ERROR;
1320    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1321
1322    LOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID);
1323    // Input parameters check
1324    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1325    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1326
1327    switch( optionID ) {
1328        case M4ENCODER_kOptionID_EncoderHeader:
1329            VIDEOEDITOR_CHECK(
1330                    M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE);
1331            *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader);
1332            break;
1333        default:
1334            LOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X",
1335                optionID);
1336            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
1337            break;
1338    }
1339
1340cleanUp:
1341    if ( M4NO_ERROR == err ) {
1342        LOGV("VideoEditorVideoEncoder_getOption no error");
1343    } else {
1344        LOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err);
1345    }
1346    return err;
1347}
1348
1349M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format,
1350        M4ENCODER_Format* pFormat,
1351        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1352    M4OSA_ERR err = M4NO_ERROR;
1353
1354    // Input parameters check
1355    VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat,           M4ERR_PARAMETER);
1356    VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER);
1357
1358    LOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat,
1359        pEncoderInterface, mode);
1360
1361    SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1,
1362        "VideoEditorVideoEncoder");
1363
1364    *pFormat = format;
1365
1366    switch( format ) {
1367        case M4ENCODER_kH263:
1368            {
1369                (*pEncoderInterface)->pFctInit =
1370                    VideoEditorVideoEncoder_init_H263;
1371                break;
1372            }
1373        case M4ENCODER_kMPEG4:
1374            {
1375                (*pEncoderInterface)->pFctInit =
1376                    VideoEditorVideoEncoder_init_MPEG4;
1377                break;
1378            }
1379        case M4ENCODER_kH264:
1380            {
1381                (*pEncoderInterface)->pFctInit =
1382                    VideoEditorVideoEncoder_init_H264;
1383                break;
1384            }
1385        default:
1386            LOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d",
1387                format);
1388            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
1389        break;
1390    }
1391    (*pEncoderInterface)->pFctOpen         = VideoEditorVideoEncoder_open;
1392    (*pEncoderInterface)->pFctStart        = VideoEditorVideoEncoder_start;
1393    (*pEncoderInterface)->pFctStop         = VideoEditorVideoEncoder_stop;
1394    (*pEncoderInterface)->pFctPause        = M4OSA_NULL;
1395    (*pEncoderInterface)->pFctResume       = M4OSA_NULL;
1396    (*pEncoderInterface)->pFctClose        = VideoEditorVideoEncoder_close;
1397    (*pEncoderInterface)->pFctCleanup      = VideoEditorVideoEncoder_cleanup;
1398    (*pEncoderInterface)->pFctRegulBitRate =
1399        VideoEditorVideoEncoder_regulBitRate;
1400    (*pEncoderInterface)->pFctEncode       = VideoEditorVideoEncoder_encode;
1401    (*pEncoderInterface)->pFctSetOption    = VideoEditorVideoEncoder_setOption;
1402    (*pEncoderInterface)->pFctGetOption    = VideoEditorVideoEncoder_getOption;
1403
1404cleanUp:
1405    if( M4NO_ERROR == err ) {
1406        LOGV("VideoEditorVideoEncoder_getInterface no error");
1407    } else {
1408        *pEncoderInterface = M4OSA_NULL;
1409        LOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err);
1410    }
1411    return err;
1412}
1413
1414extern "C" {
1415
1416M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat,
1417        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1418    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat,
1419            pEncoderInterface, mode);
1420}
1421
1422M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat,
1423        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1424    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat,
1425           pEncoderInterface, mode);
1426}
1427
1428M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat,
1429        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1430    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat,
1431           pEncoderInterface, mode);
1432
1433}
1434
1435}  // extern "C"
1436
1437}  // namespace android
1438