VideoEditorVideoEncoder.cpp revision 7efb8efc88ba529c1c57366a305855c2051ebb8b
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17*************************************************************************
18* @file   VideoEditorVideoEncoder.cpp
19* @brief  StageFright shell video encoder
20*************************************************************************
21*/
22#define LOG_NDEBUG 1
23#define LOG_TAG "VIDEOEDITOR_VIDEOENCODER"
24
25/*******************
26 *     HEADERS     *
27 *******************/
28#include "M4OSA_Debug.h"
29#include "M4SYS_AccessUnit.h"
30#include "VideoEditorVideoEncoder.h"
31#include "VideoEditorUtils.h"
32#include <YV12ColorConverter.h>
33
34#include "utils/Log.h"
35#include "utils/Vector.h"
36#include <media/stagefright/MediaSource.h>
37#include <media/stagefright/MediaDebug.h>
38#include <media/stagefright/MediaDefs.h>
39#include <media/stagefright/MetaData.h>
40#include <media/stagefright/OMXClient.h>
41#include <media/stagefright/OMXCodec.h>
42#include "OMX_Video.h"
43
44/********************
45 *   DEFINITIONS    *
46 ********************/
47
48// Force using hardware encoder
49#define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly
50
51#if !defined(VIDEOEDITOR_FORCECODEC)
52    #error "Cannot force DSI retrieval if codec type is not fixed"
53#endif
54
55/********************
56 *   SOURCE CLASS   *
57 ********************/
58
59namespace android {
60
// MediaSource implementation that feeds frames to the downstream OMX encoder.
// Acts as a thread-safe FIFO: a producer pushes buffers via storeBuffer()
// (NULL marks EOS) and the encoder pulls them back out through read().
struct VideoEditorVideoEncoderSource : public MediaSource {
    public:
        // Factory; returns a new source advertising the given format.
        static sp<VideoEditorVideoEncoderSource> Create(
            const sp<MetaData> &format);
        virtual status_t start(MetaData *params = NULL);
        virtual status_t stop();
        virtual sp<MetaData> getFormat();
        // Blocks until a buffer is queued or EOS is reached; hands the
        // buffer's ownership over to the caller.
        virtual status_t read(MediaBuffer **buffer,
            const ReadOptions *options = NULL);
        // Appends a buffer to the FIFO (NULL => EOS) and returns the number
        // of buffers currently queued.
        virtual int32_t storeBuffer(MediaBuffer *buffer);

    protected:
        virtual ~VideoEditorVideoEncoderSource();

    private:
        // Singly-linked FIFO node; owns nothing itself.
        struct MediaBufferChain {
            MediaBuffer* buffer;
            MediaBufferChain* nextLink;
        };
        enum State {
            CREATED,
            STARTED,
            ERROR
        };
        VideoEditorVideoEncoderSource(const sp<MetaData> &format);

        // Don't call me
        VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &);
        VideoEditorVideoEncoderSource &operator=(
                const VideoEditorVideoEncoderSource &);

        MediaBufferChain* mFirstBufferLink;  // FIFO head (oldest buffer)
        MediaBufferChain* mLastBufferLink;   // FIFO tail (newest buffer)
        int32_t           mNbBuffer;         // number of queued buffers
        bool              mIsEOS;            // EOS has been signalled
        State             mState;
        sp<MetaData>      mEncFormat;        // returned by getFormat()
        Mutex             mLock;             // guards all of the above
        Condition         mBufferCond;       // signalled by storeBuffer()
};
101
102sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create(
103    const sp<MetaData> &format) {
104
105    sp<VideoEditorVideoEncoderSource> aSource =
106        new VideoEditorVideoEncoderSource(format);
107    return aSource;
108}
109
// Builds an empty FIFO in the CREATED state; the queue only accepts
// readers once start() has been called.
VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource(
    const sp<MetaData> &format):
        mFirstBufferLink(NULL),
        mLastBufferLink(NULL),
        mNbBuffer(0),
        mIsEOS(false),
        mState(CREATED),
        mEncFormat(format) {
    LOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource");
}
120
121VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() {
122
123    // Safety clean up
124    if( STARTED == mState ) {
125        stop();
126    }
127}
128
129status_t VideoEditorVideoEncoderSource::start(MetaData *meta) {
130    status_t err = OK;
131
132    LOGV("VideoEditorVideoEncoderSource::start() begin");
133
134    if( CREATED != mState ) {
135        LOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState);
136        return UNKNOWN_ERROR;
137    }
138    mState = STARTED;
139
140    LOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err);
141    return err;
142}
143
144status_t VideoEditorVideoEncoderSource::stop() {
145    status_t err = OK;
146
147    LOGV("VideoEditorVideoEncoderSource::stop() begin");
148
149    if( STARTED != mState ) {
150        LOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState);
151        return UNKNOWN_ERROR;
152    }
153
154    // Release the buffer chain
155    int32_t i = 0;
156    MediaBufferChain* tmpLink = NULL;
157    while( mFirstBufferLink ) {
158        i++;
159        tmpLink = mFirstBufferLink;
160        mFirstBufferLink = mFirstBufferLink->nextLink;
161        delete tmpLink;
162    }
163    LOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i);
164    mFirstBufferLink = NULL;
165    mLastBufferLink = NULL;
166
167    mState = CREATED;
168
169    LOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err);
170    return err;
171}
172
173sp<MetaData> VideoEditorVideoEncoderSource::getFormat() {
174
175    LOGV("VideoEditorVideoEncoderSource::getFormat");
176    return mEncFormat;
177}
178
// Pops the oldest buffer from the FIFO and transfers its ownership to the
// caller. Blocks while the queue is empty until either a buffer arrives or
// EOS has been signalled; returns ERROR_END_OF_STREAM once the queue is
// drained after EOS.
status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer,
        const ReadOptions *options) {
    Mutex::Autolock autolock(mLock);
    MediaSource::ReadOptions readOptions;
    status_t err = OK;
    MediaBufferChain* tmpLink = NULL;

    LOGV("VideoEditorVideoEncoderSource::read() begin");

    if ( STARTED != mState ) {
        LOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState);
        return UNKNOWN_ERROR;
    }

    // Sleep until a producer signals mBufferCond (storeBuffer does so for
    // both new buffers and EOS).
    while (mFirstBufferLink == NULL && !mIsEOS) {
        mBufferCond.wait(mLock);
    }

    // End of stream?
    if (mFirstBufferLink == NULL) {
        *buffer = NULL;
        LOGV("VideoEditorVideoEncoderSource::read : EOS");
        return ERROR_END_OF_STREAM;
    }

    // Get a buffer from the chain. Only the link is freed here; the
    // MediaBuffer itself now belongs to the caller.
    *buffer = mFirstBufferLink->buffer;
    tmpLink = mFirstBufferLink;
    mFirstBufferLink = mFirstBufferLink->nextLink;

    if ( NULL == mFirstBufferLink ) {
        mLastBufferLink = NULL;
    }
    delete tmpLink;
    mNbBuffer--;

    LOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err);
    return err;
}
218
219int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) {
220    Mutex::Autolock autolock(mLock);
221    status_t err = OK;
222
223    LOGV("VideoEditorVideoEncoderSource::storeBuffer() begin");
224
225    if( NULL == buffer ) {
226        LOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS");
227        mIsEOS = true;
228    } else {
229        MediaBufferChain* newLink = new MediaBufferChain;
230        newLink->buffer = buffer;
231        newLink->nextLink = NULL;
232        if( NULL != mLastBufferLink ) {
233            mLastBufferLink->nextLink = newLink;
234        } else {
235            mFirstBufferLink = newLink;
236        }
237        mLastBufferLink = newLink;
238        mNbBuffer++;
239    }
240    mBufferCond.signal();
241    LOGV("VideoEditorVideoEncoderSource::storeBuffer() end");
242    return mNbBuffer;
243}
244
245/********************
246 *      PULLER      *
247 ********************/
248
249// Pulls media buffers from a MediaSource repeatedly.
250// The user can then get the buffers from that list.
// Two worker threads: an "acquire" thread that reads from mSource into
// mBuffers, and a "release" thread that releases consumed buffers from
// mReleaseBuffers, so the user never blocks on MediaBuffer::release().
class VideoEditorVideoEncoderPuller {
public:
    VideoEditorVideoEncoderPuller(sp<MediaSource> source);
    ~VideoEditorVideoEncoderPuller();
    void start();
    void stop();
    // Blocks until a buffer is available; NULL once the source is exhausted.
    MediaBuffer* getBufferBlocking();
    // Returns NULL immediately when no buffer is queued.
    MediaBuffer* getBufferNonBlocking();
    // Hands a consumed buffer to the release thread for asynchronous release.
    void putBuffer(MediaBuffer* buffer);
private:
    // C-style thread entry trampolines for androidCreateThread().
    static int acquireThreadStart(void* arg);
    void acquireThreadFunc();

    static int releaseThreadStart(void* arg);
    void releaseThreadFunc();

    sp<MediaSource> mSource;
    Vector<MediaBuffer*> mBuffers;         // acquired, waiting for the user
    Vector<MediaBuffer*> mReleaseBuffers;  // consumed, waiting for release

    Mutex mLock;
    Condition mUserCond;     // for the user of this class
    Condition mAcquireCond;  // for the acquire thread
    Condition mReleaseCond;  // for the release thread

    bool mAskToStart;      // Asks the threads to start
    bool mAskToStop;       // Asks the threads to stop
    bool mAcquireStopped;  // The acquire thread has stopped
    bool mReleaseStopped;  // The release thread has stopped
};
281
282VideoEditorVideoEncoderPuller::VideoEditorVideoEncoderPuller(
283    sp<MediaSource> source) {
284    mSource = source;
285    mAskToStart = false;
286    mAskToStop = false;
287    mAcquireStopped = false;
288    mReleaseStopped = false;
289    androidCreateThread(acquireThreadStart, this);
290    androidCreateThread(releaseThreadStart, this);
291}
292
// Stops (and waits for) both worker threads, releasing all queued buffers.
VideoEditorVideoEncoderPuller::~VideoEditorVideoEncoderPuller() {
    stop();
}
296
// Wakes both worker threads out of their initial wait so pulling begins.
void VideoEditorVideoEncoderPuller::start() {
    Mutex::Autolock autolock(mLock);
    mAskToStart = true;
    mAcquireCond.signal();
    mReleaseCond.signal();
}
303
// Asks both worker threads to terminate, waits until both have
// acknowledged (they signal mUserCond on exit), then releases every
// buffer still held in either queue. Safe to call more than once; the
// destructor calls it unconditionally.
void VideoEditorVideoEncoderPuller::stop() {
    Mutex::Autolock autolock(mLock);
    mAskToStop = true;
    mAcquireCond.signal();
    mReleaseCond.signal();
    while (!mAcquireStopped || !mReleaseStopped) {
        mUserCond.wait(mLock);
    }

    // Release remaining buffers
    for (size_t i = 0; i < mBuffers.size(); i++) {
        mBuffers.itemAt(i)->release();
    }

    for (size_t i = 0; i < mReleaseBuffers.size(); i++) {
        mReleaseBuffers.itemAt(i)->release();
    }

    mBuffers.clear();
    mReleaseBuffers.clear();
}
325
326MediaBuffer* VideoEditorVideoEncoderPuller::getBufferNonBlocking() {
327    Mutex::Autolock autolock(mLock);
328    if (mBuffers.empty()) {
329        return NULL;
330    } else {
331        MediaBuffer* b = mBuffers.itemAt(0);
332        mBuffers.removeAt(0);
333        return b;
334    }
335}
336
337MediaBuffer* VideoEditorVideoEncoderPuller::getBufferBlocking() {
338    Mutex::Autolock autolock(mLock);
339    while (mBuffers.empty() && !mAcquireStopped) {
340        mUserCond.wait(mLock);
341    }
342
343    if (mBuffers.empty()) {
344        return NULL;
345    } else {
346        MediaBuffer* b = mBuffers.itemAt(0);
347        mBuffers.removeAt(0);
348        return b;
349    }
350}
351
// Queues a consumed buffer for the release thread, so the caller never
// blocks inside MediaBuffer::release().
void VideoEditorVideoEncoderPuller::putBuffer(MediaBuffer* buffer) {
    Mutex::Autolock autolock(mLock);
    mReleaseBuffers.push(buffer);
    mReleaseCond.signal();
}
357
358int VideoEditorVideoEncoderPuller::acquireThreadStart(void* arg) {
359    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
360    self->acquireThreadFunc();
361    return 0;
362}
363
364int VideoEditorVideoEncoderPuller::releaseThreadStart(void* arg) {
365    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
366    self->releaseThreadFunc();
367    return 0;
368}
369
// Acquire-thread body: repeatedly reads buffers from the wrapped source
// and queues them for the user. The lock is dropped around the blocking
// MediaSource::read() call so stop()/putBuffer() are never starved.
void VideoEditorVideoEncoderPuller::acquireThreadFunc() {
    mLock.lock();

    // Wait for the start signal
    while (!mAskToStart && !mAskToStop) {
        mAcquireCond.wait(mLock);
    }

    // Loop until we are asked to stop, or there is nothing more to read
    while (!mAskToStop) {
        MediaBuffer* pBuffer;
        mLock.unlock();
        status_t result = mSource->read(&pBuffer, NULL);
        mLock.lock();
        if (result != OK) {
            // EOS or error on the source: this thread is done.
            break;
        }
        mBuffers.push(pBuffer);
        mUserCond.signal();
    }

    mAcquireStopped = true;
    mUserCond.signal();  // unblock stop() and getBufferBlocking()
    mLock.unlock();
}
395
// Release-thread body: releases buffers queued by putBuffer(). The lock is
// dropped around release() (it may block in the codec), and the loop only
// exits once the release queue is empty AND stop has been requested, so no
// queued buffer is leaked on shutdown.
void VideoEditorVideoEncoderPuller::releaseThreadFunc() {
    mLock.lock();

    // Wait for the start signal
    while (!mAskToStart && !mAskToStop) {
        mReleaseCond.wait(mLock);
    }

    // Loop until we are asked to stop
    while (1) {
        if (mReleaseBuffers.empty()) {
            if (mAskToStop) {
                break;
            } else {
                mReleaseCond.wait(mLock);
                continue;
            }
        }
        MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0);
        mReleaseBuffers.removeAt(0);
        mLock.unlock();
        pBuffer->release();
        mLock.lock();
    }

    mReleaseStopped = true;
    mUserCond.signal();  // unblock stop()
    mLock.unlock();
}
425
426/**
427 ******************************************************************************
428 * structure VideoEditorVideoEncoder_Context
429 * @brief    This structure defines the context of the StageFright video encoder
430 *           shell
431 ******************************************************************************
432*/
// Life-cycle states of the encoder shell. Declared as bit flags, though the
// visible code only ever holds one state at a time.
typedef enum {
    CREATED   = 0x1,    // context allocated by init()
    OPENED    = 0x2,    // StageFright graph built by open()
    STARTED   = 0x4,    // presumably set by start() -- not visible here
    BUFFERING = 0x8,    // presumably transient encode states -- confirm
    READING   = 0x10    //   against the rest of the file
} VideoEditorVideoEncoder_State;
440
typedef struct {
    VideoEditorVideoEncoder_State     mState;               // life-cycle state
    M4ENCODER_Format                  mFormat;              // target video format
    M4WRITER_DataInterface*           mWriterDataInterface; // writer callbacks
    M4VPP_apply_fct*                  mPreProcFunction;     // pre-processing hook
    M4VPP_Context                     mPreProcContext;      // pre-processing ctx
    M4SYS_AccessUnit*                 mAccessUnit;          // AU passed to open()
    M4ENCODER_Params*                 mCodecParams;         // copy of open() params
    M4ENCODER_Header                  mHeader;              // DSI (codec config data)
    H264MCS_ProcessEncodedNALU_fct*   mH264NALUPostProcessFct; // optional NALU hook
    M4OSA_Context                     mH264NALUPostProcessCtx; // ctx for the hook
    M4OSA_UInt32                      mLastCTS;             // presumably last CTS
                                                            // written -- confirm
    sp<VideoEditorVideoEncoderSource> mEncoderSource;       // FIFO feeding the codec
    OMXClient                         mClient;              // OMX connection
    sp<MediaSource>                   mEncoder;             // the OMX encoder
    OMX_COLOR_FORMATTYPE              mEncoderColorFormat;  // encoder input format
    VideoEditorVideoEncoderPuller*    mPuller;              // pulls encoded output
    YV12ColorConverter*               mYV12ColorConverter;  // NULL when input is
                                                            // already I420 (see init)

    // Encoding statistics. NOTE(review): input CTS are double, output CTS
    // int64 -- units look like ms vs us; confirm against processOutputBuffer.
    uint32_t                          mNbInputFrames;
    double                            mFirstInputCts;
    double                            mLastInputCts;
    uint32_t                          mNbOutputFrames;
    int64_t                           mFirstOutputCts;
    int64_t                           mLastOutputCts;

} VideoEditorVideoEncoder_Context;
468
469/********************
470 *      TOOLS       *
471 ********************/
472
473M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
474        sp<MetaData> metaData) {
475    M4OSA_ERR err = M4NO_ERROR;
476    VideoEditorVideoEncoder_Context*  pEncoderContext = M4OSA_NULL;
477    status_t result = OK;
478    int32_t nbBuffer = 0;
479    int32_t stride = 0;
480    int32_t height = 0;
481    int32_t framerate = 0;
482    int32_t isCodecConfig = 0;
483    size_t size = 0;
484    uint32_t codecFlags = 0;
485    MediaBuffer* inputBuffer = NULL;
486    MediaBuffer* outputBuffer = NULL;
487    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
488    sp<MediaSource> encoder = NULL;;
489    OMXClient client;
490
491    LOGV("VideoEditorVideoEncoder_getDSI begin");
492    // Input parameters check
493    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
494    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);
495
496    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
497    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
498
499    // Create the encoder source
500    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
501    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);
502
503    // Connect to the OMX client
504    result = client.connect();
505    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
506
507    // Create the OMX codec
508    // VIDEOEDITOR_FORCECODEC MUST be defined here
509    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
510    encoder = OMXCodec::Create(client.interface(), metaData, true,
511        encoderSource, NULL, codecFlags);
512    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);
513
514    /**
515     * Send fake frames and retrieve the DSI
516     */
517    // Send a fake frame to the source
518    metaData->findInt32(kKeyStride,     &stride);
519    metaData->findInt32(kKeyHeight,     &height);
520    metaData->findInt32(kKeySampleRate, &framerate);
521    size = (size_t)(stride*height*3)/2;
522    inputBuffer = new MediaBuffer(size);
523    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
524    nbBuffer = encoderSource->storeBuffer(inputBuffer);
525    encoderSource->storeBuffer(NULL); // Signal EOS
526
527    // Call read once to get the DSI
528    result = encoder->start();;
529    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
530    result = encoder->read(&outputBuffer, NULL);
531    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
532    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
533        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);
534
535    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
536    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
537        // For H264, format the DSI
538        result = buildAVCCodecSpecificData(
539            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
540            (size_t*)(&(pEncoderContext->mHeader.Size)),
541            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
542            outputBuffer->range_length(), encoder->getFormat().get());
543        outputBuffer->release();
544        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
545    } else {
546        // For MPEG4, just copy the DSI
547        pEncoderContext->mHeader.Size =
548            (M4OSA_UInt32)outputBuffer->range_length();
549        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
550            pEncoderContext->mHeader.Size, "Encoder header");
551        memcpy((void *)pEncoderContext->mHeader.pBuf,
552            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
553            pEncoderContext->mHeader.Size);
554        outputBuffer->release();
555    }
556
557    result = encoder->stop();
558    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
559
560cleanUp:
561    // Destroy the graph
562    if ( encoder != NULL ) { encoder.clear(); }
563    client.disconnect();
564    if ( encoderSource != NULL ) { encoderSource.clear(); }
565    if ( M4NO_ERROR == err ) {
566        LOGV("VideoEditorVideoEncoder_getDSI no error");
567    } else {
568        LOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
569    }
570    LOGV("VideoEditorVideoEncoder_getDSI end");
571    return err;
572}
573/********************
574 * ENGINE INTERFACE *
575 ********************/
576
// Frees the encoder shell context (DSI buffer + context struct). Only legal
// from the CREATED state, i.e. after close() or a failed init().
M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;

    LOGV("VideoEditorVideoEncoder_cleanup begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Release memory
    SAFE_FREE(pEncoderContext->mHeader.pBuf);
    SAFE_FREE(pEncoderContext);
    // NOTE(review): this only clears the local copy of the handle; the
    // caller's pointer is left dangling and must not be reused.
    pContext = M4OSA_NULL;

cleanUp:
    if ( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_cleanup no error");
    } else {
        LOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_cleanup end");
    return err;
}
602
// Generic initializer shared by the H263/MPEG4/H264 entry points: allocates
// the shell context, stores the writer and pre-processing callbacks, and
// probes the platform color converter for the encoder's input color format.
// pExternalAPI and pUserData are accepted but unused here.
M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format,
        M4ENCODER_Context* pContext,
        M4WRITER_DataInterface* pWriterDataInterface,
        M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt,
        M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) {

    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    int encoderInput = OMX_COLOR_FormatYUV420Planar;

    LOGV("VideoEditorVideoEncoder_init begin: format  %d", format);
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER);

    // Context allocation & initialization
    SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1,
        "VideoEditorVideoEncoder");
    pEncoderContext->mState = CREATED;
    pEncoderContext->mFormat = format;
    pEncoderContext->mWriterDataInterface = pWriterDataInterface;
    pEncoderContext->mPreProcFunction = pVPPfct;
    pEncoderContext->mPreProcContext = pVPPctxt;
    pEncoderContext->mPuller = NULL;

    // Get color converter and determine encoder input format
    pEncoderContext->mYV12ColorConverter = new YV12ColorConverter;
    if (pEncoderContext->mYV12ColorConverter->isLoaded()) {
        encoderInput = pEncoderContext->mYV12ColorConverter->getEncoderInputFormat();
    }
    // If the encoder consumes plain I420 no conversion is required: drop
    // the converter so later code can test mYV12ColorConverter for NULL.
    if (encoderInput == OMX_COLOR_FormatYUV420Planar) {
        delete pEncoderContext->mYV12ColorConverter;
        pEncoderContext->mYV12ColorConverter = NULL;
    }
    pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput;
    LOGI("encoder input format = 0x%X\n", encoderInput);

    *pContext = pEncoderContext;

cleanUp:
    if ( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_init no error");
    } else {
        // Failure: free whatever was allocated and hand back a NULL handle.
        VideoEditorVideoEncoder_cleanup(pEncoderContext);
        *pContext = M4OSA_NULL;
        LOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_init end");
    return err;
}
655
656M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext,
657        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
658        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
659        {
660
661    return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext,
662        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
663}
664
665
666M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext,
667        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
668        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
669        {
670
671    return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext,
672        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
673}
674
675
676M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext,
677        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
678        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
679        {
680
681    return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext,
682        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
683}
684
// Tears down the StageFright graph built by open() and frees the copied
// codec parameters, returning the context to the CREATED state so that
// cleanup() (or another open()) may follow.
M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;

    LOGV("VideoEditorVideoEncoder_close begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);

    // Release memory
    SAFE_FREE(pEncoderContext->mCodecParams);

    // Destroy the graph (encoder first, then the OMX connection, then the
    // source that fed it)
    pEncoderContext->mEncoder.clear();
    pEncoderContext->mClient.disconnect();
    pEncoderContext->mEncoderSource.clear();

    delete pEncoderContext->mPuller;
    pEncoderContext->mPuller = NULL;

    delete pEncoderContext->mYV12ColorConverter;
    pEncoderContext->mYV12ColorConverter = NULL;

    // Set the new state
    pEncoderContext->mState = CREATED;

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_close no error");
    } else {
        LOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_close end");
    return err;
}
722
723
// Opens the encoder: validates and copies the encoding parameters, builds
// the encoder MetaData (mime/profile/size/rate/bitrate/color format),
// retrieves the DSI via a throw-away graph, then constructs the real
// StageFright graph (source -> OMX encoder -> puller). On success the
// state becomes OPENED; on failure everything is torn down via close().
M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext,
        M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4ENCODER_Params* pCodecParams = M4OSA_NULL;
    status_t result = OK;
    sp<MetaData> encoderMetadata = NULL;
    const char* mime = NULL;
    int32_t iProfile = 0;
    int32_t iFrameRate = 0;
    uint32_t codecFlags = 0;

    LOGV(">>> VideoEditorVideoEncoder_open begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pAU,      M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pParams,  M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    pCodecParams = (M4ENCODER_Params*)pParams;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Context initialization
    pEncoderContext->mAccessUnit = pAU;

    // Allocate & initialize the encoding parameters (deep copy, so the
    // caller's struct may be released after open())
    SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1,
        "VideoEditorVideoEncoder");


    pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat;
    pEncoderContext->mCodecParams->InputFrameWidth =
        pCodecParams->InputFrameWidth;
    pEncoderContext->mCodecParams->InputFrameHeight =
        pCodecParams->InputFrameHeight;
    pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth;
    pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight;
    pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate;
    pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate;
    pEncoderContext->mCodecParams->Format = pCodecParams->Format;

    // Check output format consistency and resolution (dimensions must be
    // multiples of 16, i.e. whole macroblocks)
    VIDEOEDITOR_CHECK(
        pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat,
        M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth  % 16,
        M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16,
        M4ERR_PARAMETER);

    /**
     * StageFright graph building
     */

    // Create the meta data for the encoder
    encoderMetadata = new MetaData;
    switch( pEncoderContext->mCodecParams->Format ) {
        case M4ENCODER_kH263:
            mime     = MEDIA_MIMETYPE_VIDEO_H263;
            iProfile = OMX_VIDEO_H263ProfileBaseline;
            break;
        case M4ENCODER_kMPEG4:
            mime     = MEDIA_MIMETYPE_VIDEO_MPEG4;
            iProfile = OMX_VIDEO_MPEG4ProfileSimple;
            break;
        case M4ENCODER_kH264:
            mime     = MEDIA_MIMETYPE_VIDEO_AVC;
            iProfile = OMX_VIDEO_AVCProfileBaseline;
            break;
        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format",
                M4ERR_PARAMETER);
            break;
    }
    encoderMetadata->setCString(kKeyMIMEType, mime);
    encoderMetadata->setInt32(kKeyVideoProfile, iProfile);
    // Stride/slice-height are set equal to width/height: frames fed to the
    // encoder are tightly packed.
    encoderMetadata->setInt32(kKeyWidth,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyStride,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);
    encoderMetadata->setInt32(kKeySliceHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);

    // Map the M4ENCODER frame-rate enum to an integer fps; fractional
    // rates (7.5, 12.5) are rounded up.
    switch( pEncoderContext->mCodecParams->FrameRate ) {
        case M4ENCODER_k5_FPS:    iFrameRate = 5;  break;
        case M4ENCODER_k7_5_FPS:  iFrameRate = 8;  break;
        case M4ENCODER_k10_FPS:   iFrameRate = 10; break;
        case M4ENCODER_k12_5_FPS: iFrameRate = 13; break;
        case M4ENCODER_k15_FPS:   iFrameRate = 15; break;
        case M4ENCODER_k20_FPS:   iFrameRate = 20; break;
        case M4ENCODER_k25_FPS:   iFrameRate = 25; break;
        case M4ENCODER_k30_FPS:   iFrameRate = 30; break;
        case M4ENCODER_kVARIABLE_FPS:
            iFrameRate = 30;
            LOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30");
          break;
        case M4ENCODER_kUSE_TIMESCALE:
            iFrameRate = 30;
            LOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE:  set to 30");
            break;

        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate",
                M4ERR_STATE);
            break;
    }
    encoderMetadata->setInt32(kKeyFrameRate, iFrameRate);
    encoderMetadata->setInt32(kKeyBitRate,
        (int32_t)pEncoderContext->mCodecParams->Bitrate);
    // One I-frame per second
    encoderMetadata->setInt32(kKeyIFramesInterval, 1);

    encoderMetadata->setInt32(kKeyColorFormat,
        pEncoderContext->mEncoderColorFormat);

    // Get the encoder DSI (builds and destroys a temporary graph)
    err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata);
    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

    // Create the encoder source
    pEncoderContext->mEncoderSource = VideoEditorVideoEncoderSource::Create(
        encoderMetadata);
    VIDEOEDITOR_CHECK(
        NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = pEncoderContext->mClient.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
#ifdef VIDEOEDITOR_FORCECODEC
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
#endif /* VIDEOEDITOR_FORCECODEC */
    pEncoderContext->mEncoder = OMXCodec::Create(
        pEncoderContext->mClient.interface(), encoderMetadata, true,
        pEncoderContext->mEncoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE);
    LOGV("VideoEditorVideoEncoder_open : DONE");
    // The puller drains the encoder's output on its own threads
    pEncoderContext->mPuller = new VideoEditorVideoEncoderPuller(
        pEncoderContext->mEncoder);

    // Set the new state
    pEncoderContext->mState = OPENED;

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_open no error");
    } else {
        VideoEditorVideoEncoder_close(pEncoderContext);
        LOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_open end");
    return err;
}
879
880M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer(
881        M4ENCODER_Context pContext, M4OSA_Double Cts,
882        M4OSA_Bool bReachedEOS) {
883    M4OSA_ERR err = M4NO_ERROR;
884    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
885    M4VIFI_ImagePlane pOutPlane[3];
886    MediaBuffer* buffer = NULL;
887    int32_t nbBuffer = 0;
888
889    LOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts  %f", Cts);
890    // Input parameters check
891    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
892
893    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
894    pOutPlane[0].pac_data = M4OSA_NULL;
895    pOutPlane[1].pac_data = M4OSA_NULL;
896    pOutPlane[2].pac_data = M4OSA_NULL;
897
898    if ( M4OSA_FALSE == bReachedEOS ) {
899        M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth *
900            pEncoderContext->mCodecParams->FrameHeight;
901        M4OSA_UInt32 sizeU = sizeY >> 2;
902        M4OSA_UInt32 size  = sizeY + 2*sizeU;
903        M4OSA_UInt8* pData = M4OSA_NULL;
904        buffer = new MediaBuffer((size_t)size);
905        pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset();
906
907        // Prepare the output image for pre-processing
908        pOutPlane[0].u_width   = pEncoderContext->mCodecParams->FrameWidth;
909        pOutPlane[0].u_height  = pEncoderContext->mCodecParams->FrameHeight;
910        pOutPlane[0].u_topleft = 0;
911        pOutPlane[0].u_stride  = pOutPlane[0].u_width;
912        pOutPlane[1].u_width   = pOutPlane[0].u_width/2;
913        pOutPlane[1].u_height  = pOutPlane[0].u_height/2;
914        pOutPlane[1].u_topleft = 0;
915        pOutPlane[1].u_stride  = pOutPlane[0].u_stride/2;
916        pOutPlane[2].u_width   = pOutPlane[1].u_width;
917        pOutPlane[2].u_height  = pOutPlane[1].u_height;
918        pOutPlane[2].u_topleft = 0;
919        pOutPlane[2].u_stride  = pOutPlane[1].u_stride;
920
921        switch( pEncoderContext->mEncoderColorFormat ) {
922            case OMX_COLOR_FormatYUV420Planar:
923                pOutPlane[0].pac_data = pData;
924                pOutPlane[1].pac_data = pData + sizeY;
925                pOutPlane[2].pac_data = pData + sizeY + sizeU;
926            break;
927            case OMX_COLOR_FormatYUV420SemiPlanar:
928                pOutPlane[0].pac_data = pData;
929                SAFE_MALLOC(pOutPlane[1].pac_data, M4VIFI_UInt8,
930                    pOutPlane[1].u_height*pOutPlane[1].u_stride,"OutputPlaneU");
931                SAFE_MALLOC(pOutPlane[2].pac_data, M4VIFI_UInt8,
932                    pOutPlane[2].u_height*pOutPlane[2].u_stride,"OutputPlaneV");
933            break;
934            default:
935                LOGV("VideoEditorVideoEncoder_processInputBuffer : unsupported "
936                    "color format 0x%X", pEncoderContext->mEncoderColorFormat);
937                VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
938            break;
939        }
940
941        // Apply pre-processing
942        err = pEncoderContext->mPreProcFunction(
943            pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane);
944        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
945
946        // Convert MediaBuffer to the encoder input format if necessary
947        if (pEncoderContext->mYV12ColorConverter) {
948            YV12ColorConverter* converter = pEncoderContext->mYV12ColorConverter;
949            int actualWidth = pEncoderContext->mCodecParams->FrameWidth;
950            int actualHeight = pEncoderContext->mCodecParams->FrameHeight;
951
952            int encoderWidth, encoderHeight;
953            ARect encoderRect;
954            int encoderBufferSize;
955
956            if (converter->getEncoderInputBufferInfo(
957                actualWidth, actualHeight,
958                &encoderWidth, &encoderHeight,
959                &encoderRect, &encoderBufferSize) == 0) {
960
961                MediaBuffer* newBuffer = new MediaBuffer(encoderBufferSize);
962
963                if (converter->convertYV12ToEncoderInput(
964                    pData,  // srcBits
965                    actualWidth, actualHeight,
966                    encoderWidth, encoderHeight,
967                    encoderRect,
968                    (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) {
969                    LOGE("convertYV12ToEncoderInput failed");
970                }
971
972                // switch to new buffer
973                buffer->release();
974                buffer = newBuffer;
975            }
976        }
977
978        // Set the metadata
979        buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000));
980    }
981
982    // Push the buffer to the source, a NULL buffer, notifies the source of EOS
983    nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer);
984
985cleanUp:
986    if ( OMX_COLOR_FormatYUV420SemiPlanar == \
987            pEncoderContext->mEncoderColorFormat ) {
988        // Y plane has not been allocated
989        if ( pOutPlane[1].pac_data ) {
990            SAFE_FREE(pOutPlane[1].pac_data);
991        }
992        if ( pOutPlane[2].pac_data ) {
993            SAFE_FREE(pOutPlane[2].pac_data);
994        }
995    }
996    if ( M4NO_ERROR == err ) {
997        LOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err);
998    } else {
999        if( NULL != buffer ) {
1000            buffer->release();
1001        }
1002        LOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err);
1003    }
1004    LOGV("VideoEditorVideoEncoder_processInputBuffer end");
1005    return err;
1006}
1007
/**
 * Forwards one encoded access unit (AU) from the encoder to the writer.
 *
 * Empty buffers are ignored. Codec-config buffers (DSI) are only logged
 * here (the DSI itself is retrieved separately at open() time). For
 * regular AUs the H264 start code is stripped, the optional H264 NALU
 * post-processing callback is applied, and the AU is copied into the
 * writer's AU container and committed.
 *
 * @param pContext encoder context (VideoEditorVideoEncoder_Context*)
 * @param buffer   encoded output buffer returned by the encoder
 * @return M4NO_ERROR on success, an M4OSA error code otherwise
 */
M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer(
        M4ENCODER_Context pContext, MediaBuffer* buffer) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4OSA_UInt32 Cts = 0;
    int32_t i32Tmp = 0;
    int64_t i64Tmp = 0;
    status_t result = OK;

    LOGV("VideoEditorVideoEncoder_processOutputBuffer begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != buffer,   M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;

    // Process the returned AU
    if ( 0 == buffer->range_length() ) {
        // Encoder has no data yet, nothing unusual
        LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty");
        goto cleanUp;
    }
    // Require 4-byte alignment of the encoder output.
    // NOTE(review): casting the pointer to M4OSA_UInt32 truncates it on
    // 64-bit builds -- verify before porting.
    VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER);
    if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){
        {   // Display the DSI
            LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",
                buffer->range_length());
            uint8_t* tmp = (uint8_t*)(buffer->data());
            for( uint32_t i=0; i<buffer->range_length(); i++ ) {
                LOGV("DSI [%d] %.2X", i, tmp[i]);
            }
        }
    } else {
        // Check the CTS: a time stamp is mandatory on every encoded frame.
        VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp),
            M4ERR_STATE);

        // Track output statistics for the input/output mismatch warning
        // emitted by _stop().
        pEncoderContext->mNbOutputFrames++;
        if ( 0 > pEncoderContext->mFirstOutputCts ) {
            pEncoderContext->mFirstOutputCts = i64Tmp;
        }
        pEncoderContext->mLastOutputCts = i64Tmp;

        // kKeyTime is in microseconds; the writer works in milliseconds.
        Cts = (M4OSA_Int32)(i64Tmp/1000);
        LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)",
            pEncoderContext->mNbOutputFrames, i64Tmp, Cts,
            pEncoderContext->mLastCTS);
        if ( Cts < pEncoderContext->mLastCTS ) {
            // Out-of-order frame: drop it rather than corrupt the stream.
            LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going "
            "backwards %d < %d", Cts, pEncoderContext->mLastCTS);
            goto cleanUp;
        }
        LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d",
            Cts, pEncoderContext->mLastCTS);

        // Retrieve the AU container
        err = pEncoderContext->mWriterDataInterface->pStartAU(
            pEncoderContext->mWriterDataInterface->pWriterContext,
            pEncoderContext->mAccessUnit->stream->streamID,
            pEncoderContext->mAccessUnit);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

        // Format the AU
        VIDEOEDITOR_CHECK(
            buffer->range_length() <= pEncoderContext->mAccessUnit->size,
            M4ERR_PARAMETER);
        // Remove H264 AU start code (00 00 00 01) if present.
        if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
            if (!memcmp((const uint8_t *)buffer->data() + \
                    buffer->range_offset(), "\x00\x00\x00\x01", 4) ) {
                buffer->set_range(buffer->range_offset() + 4,
                    buffer->range_length() - 4);
            }
        }

        if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) &&
            (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) {
        // H264 trimming case, NALU post processing is needed
        M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size;
        err = pEncoderContext->mH264NALUPostProcessFct(
            pEncoderContext->mH264NALUPostProcessCtx,
            (M4OSA_UInt8*)buffer->data()+buffer->range_offset(),
            buffer->range_length(),
            (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress,
            &outputSize);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
        pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize;
        } else {
            // The AU can just be copied
            memcpy((void *)pEncoderContext->mAccessUnit->\
                dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\
                range_offset()), buffer->range_length());
            pEncoderContext->mAccessUnit->size =
                (M4OSA_UInt32)buffer->range_length();
        }

        // Sync frames (key frames) become random access points for the writer.
        if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){
            pEncoderContext->mAccessUnit->attribute = AU_RAP;
        } else {
            pEncoderContext->mAccessUnit->attribute = AU_P_Frame;
        }
        pEncoderContext->mLastCTS = Cts;
        pEncoderContext->mAccessUnit->CTS = Cts;
        pEncoderContext->mAccessUnit->DTS = Cts;

        LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d",
            pEncoderContext->mAccessUnit->dataAddress,
            *pEncoderContext->mAccessUnit->dataAddress,
            pEncoderContext->mAccessUnit->size,
            pEncoderContext->mAccessUnit->CTS);

        // Write the AU
        err = pEncoderContext->mWriterDataInterface->pProcessAU(
            pEncoderContext->mWriterDataInterface->pWriterContext,
            pEncoderContext->mAccessUnit->stream->streamID,
            pEncoderContext->mAccessUnit);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
    }

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_processOutputBuffer no error");
    } else {
        // On error, drop the cached header -- presumably to force the DSI
        // to be rebuilt on the next attempt; verify against getDSI().
        SAFE_FREE(pEncoderContext->mHeader.pBuf);
        pEncoderContext->mHeader.Size = 0;
        LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_processOutputBuffer end");
    return err;
}
1139
1140M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext,
1141        M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts,
1142        M4ENCODER_FrameMode FrameMode) {
1143    M4OSA_ERR err = M4NO_ERROR;
1144    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1145    status_t result = OK;
1146    MediaBuffer* outputBuffer = NULL;
1147
1148    LOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode);
1149    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1150
1151    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1152    if ( STARTED == pEncoderContext->mState ) {
1153        pEncoderContext->mState = BUFFERING;
1154    }
1155    VIDEOEDITOR_CHECK(
1156        (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE);
1157
1158    pEncoderContext->mNbInputFrames++;
1159    if ( 0 > pEncoderContext->mFirstInputCts ) {
1160        pEncoderContext->mFirstInputCts = Cts;
1161    }
1162    pEncoderContext->mLastInputCts = Cts;
1163
1164    LOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode,
1165        Cts, pEncoderContext->mLastCTS);
1166
1167    // Push the input buffer to the encoder source
1168    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts,
1169        M4OSA_FALSE);
1170    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1171
1172    // Notify the source in case of EOS
1173    if ( M4ENCODER_kLastFrame == FrameMode ) {
1174        err = VideoEditorVideoEncoder_processInputBuffer(
1175            pEncoderContext, 0, M4OSA_TRUE);
1176        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1177    }
1178
1179    if ( BUFFERING == pEncoderContext->mState ) {
1180        // Prefetch is complete, start reading
1181        pEncoderContext->mState = READING;
1182    }
1183    // Read
1184    while (1)  {
1185        MediaBuffer *outputBuffer =
1186                pEncoderContext->mPuller->getBufferNonBlocking();
1187
1188        if (outputBuffer == NULL) break;
1189
1190        // Provide the encoded AU to the writer
1191        err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext,
1192            outputBuffer);
1193        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1194
1195        pEncoderContext->mPuller->putBuffer(outputBuffer);
1196    }
1197
1198cleanUp:
1199    if( M4NO_ERROR == err ) {
1200        LOGV("VideoEditorVideoEncoder_encode no error");
1201    } else {
1202        LOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err);
1203    }
1204    LOGV("VideoEditorVideoEncoder_encode end");
1205    return err;
1206}
1207
1208M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) {
1209    M4OSA_ERR                  err             = M4NO_ERROR;
1210    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1211    status_t                   result          = OK;
1212
1213    LOGV("VideoEditorVideoEncoder_start begin");
1214    // Input parameters check
1215    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1216
1217    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1218    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);
1219
1220    pEncoderContext->mNbInputFrames  = 0;
1221    pEncoderContext->mFirstInputCts  = -1.0;
1222    pEncoderContext->mLastInputCts   = -1.0;
1223    pEncoderContext->mNbOutputFrames = 0;
1224    pEncoderContext->mFirstOutputCts = -1;
1225    pEncoderContext->mLastOutputCts  = -1;
1226
1227    result = pEncoderContext->mEncoder->start();
1228    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
1229
1230    pEncoderContext->mPuller->start();
1231
1232    // Set the new state
1233    pEncoderContext->mState = STARTED;
1234
1235cleanUp:
1236    if ( M4NO_ERROR == err ) {
1237        LOGV("VideoEditorVideoEncoder_start no error");
1238    } else {
1239        LOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err);
1240    }
1241    LOGV("VideoEditorVideoEncoder_start end");
1242    return err;
1243}
1244
1245M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) {
1246    M4OSA_ERR err = M4NO_ERROR;
1247    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1248    MediaBuffer* outputBuffer = NULL;
1249    status_t result = OK;
1250
1251    LOGV("VideoEditorVideoEncoder_stop begin");
1252    // Input parameters check
1253    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1254    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1255
1256    // Send EOS again to make sure the source doesn't block.
1257    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0,
1258        M4OSA_TRUE);
1259    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1260
1261    // Process the remaining buffers if necessary
1262    if ( (BUFFERING | READING) & pEncoderContext->mState ) {
1263        while (1)  {
1264            MediaBuffer *outputBuffer =
1265                pEncoderContext->mPuller->getBufferBlocking();
1266
1267            if (outputBuffer == NULL) break;
1268
1269            err = VideoEditorVideoEncoder_processOutputBuffer(
1270                pEncoderContext, outputBuffer);
1271            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1272
1273            pEncoderContext->mPuller->putBuffer(outputBuffer);
1274        }
1275
1276        pEncoderContext->mState = STARTED;
1277    }
1278
1279    // Stop the graph module if necessary
1280    if ( STARTED == pEncoderContext->mState ) {
1281        pEncoderContext->mPuller->stop();
1282        pEncoderContext->mEncoder->stop();
1283        pEncoderContext->mState = OPENED;
1284    }
1285
1286    if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) {
1287        LOGW("Some frames were not encoded: input(%d) != output(%d)",
1288            pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
1289    }
1290
1291cleanUp:
1292    if ( M4NO_ERROR == err ) {
1293        LOGV("VideoEditorVideoEncoder_stop no error");
1294    } else {
1295        LOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err);
1296    }
1297    LOGV("VideoEditorVideoEncoder_stop end");
1298    return err;
1299}
1300
1301M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) {
1302    LOGW("regulBitRate is not implemented");
1303    return M4NO_ERROR;
1304}
1305
1306M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext,
1307        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
1308    M4OSA_ERR err = M4NO_ERROR;
1309    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1310
1311    LOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID);
1312    // Input parameters check
1313    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1314
1315    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1316
1317    switch( optionID ) {
1318        case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr:
1319            pEncoderContext->mH264NALUPostProcessFct =
1320                (H264MCS_ProcessEncodedNALU_fct*)optionValue;
1321            break;
1322        case M4ENCODER_kOptionID_H264ProcessNALUContext:
1323            pEncoderContext->mH264NALUPostProcessCtx =
1324                (M4OSA_Context)optionValue;
1325            break;
1326        default:
1327            LOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X",
1328                optionID);
1329            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
1330            break;
1331    }
1332
1333cleanUp:
1334    if ( M4NO_ERROR == err ) {
1335        LOGV("VideoEditorVideoEncoder_setOption no error");
1336    } else {
1337        LOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err);
1338    }
1339    LOGV("VideoEditorVideoEncoder_setOption end");
1340    return err;
1341}
1342
1343M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext,
1344        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
1345    M4OSA_ERR err = M4NO_ERROR;
1346    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1347
1348    LOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID);
1349    // Input parameters check
1350    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1351    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1352
1353    switch( optionID ) {
1354        case M4ENCODER_kOptionID_EncoderHeader:
1355            VIDEOEDITOR_CHECK(
1356                    M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE);
1357            *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader);
1358            break;
1359        default:
1360            LOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X",
1361                optionID);
1362            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
1363            break;
1364    }
1365
1366cleanUp:
1367    if ( M4NO_ERROR == err ) {
1368        LOGV("VideoEditorVideoEncoder_getOption no error");
1369    } else {
1370        LOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err);
1371    }
1372    return err;
1373}
1374
1375M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format,
1376        M4ENCODER_Format* pFormat,
1377        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1378    M4OSA_ERR err = M4NO_ERROR;
1379
1380    // Input parameters check
1381    VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat,           M4ERR_PARAMETER);
1382    VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER);
1383
1384    LOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat,
1385        pEncoderInterface, mode);
1386
1387    SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1,
1388        "VideoEditorVideoEncoder");
1389
1390    *pFormat = format;
1391
1392    switch( format ) {
1393        case M4ENCODER_kH263:
1394            {
1395                (*pEncoderInterface)->pFctInit =
1396                    VideoEditorVideoEncoder_init_H263;
1397                break;
1398            }
1399        case M4ENCODER_kMPEG4:
1400            {
1401                (*pEncoderInterface)->pFctInit =
1402                    VideoEditorVideoEncoder_init_MPEG4;
1403                break;
1404            }
1405        case M4ENCODER_kH264:
1406            {
1407                (*pEncoderInterface)->pFctInit =
1408                    VideoEditorVideoEncoder_init_H264;
1409                break;
1410            }
1411        default:
1412            LOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d",
1413                format);
1414            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
1415        break;
1416    }
1417    (*pEncoderInterface)->pFctOpen         = VideoEditorVideoEncoder_open;
1418    (*pEncoderInterface)->pFctStart        = VideoEditorVideoEncoder_start;
1419    (*pEncoderInterface)->pFctStop         = VideoEditorVideoEncoder_stop;
1420    (*pEncoderInterface)->pFctPause        = M4OSA_NULL;
1421    (*pEncoderInterface)->pFctResume       = M4OSA_NULL;
1422    (*pEncoderInterface)->pFctClose        = VideoEditorVideoEncoder_close;
1423    (*pEncoderInterface)->pFctCleanup      = VideoEditorVideoEncoder_cleanup;
1424    (*pEncoderInterface)->pFctRegulBitRate =
1425        VideoEditorVideoEncoder_regulBitRate;
1426    (*pEncoderInterface)->pFctEncode       = VideoEditorVideoEncoder_encode;
1427    (*pEncoderInterface)->pFctSetOption    = VideoEditorVideoEncoder_setOption;
1428    (*pEncoderInterface)->pFctGetOption    = VideoEditorVideoEncoder_getOption;
1429
1430cleanUp:
1431    if( M4NO_ERROR == err ) {
1432        LOGV("VideoEditorVideoEncoder_getInterface no error");
1433    } else {
1434        *pEncoderInterface = M4OSA_NULL;
1435        LOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err);
1436    }
1437    return err;
1438}
1439
1440extern "C" {
1441
1442M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat,
1443        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1444    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat,
1445            pEncoderInterface, mode);
1446}
1447
1448M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat,
1449        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1450    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat,
1451           pEncoderInterface, mode);
1452}
1453
1454M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat,
1455        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1456    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat,
1457           pEncoderInterface, mode);
1458
1459}
1460
1461}  // extern "C"
1462
1463}  // namespace android
1464