VideoEditorVideoEncoder.cpp revision 1b37de259e00e22f261f2711e0a318aa386581c7
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17*************************************************************************
18* @file   VideoEditorVideoEncoder.cpp
19* @brief  StageFright shell video encoder
20*************************************************************************
21*/
22#define LOG_NDEBUG 1
23#define LOG_TAG "VIDEOEDITOR_VIDEOENCODER"
24
25/*******************
26 *     HEADERS     *
27 *******************/
28#include "M4OSA_Debug.h"
29#include "M4SYS_AccessUnit.h"
30#include "VideoEditorVideoEncoder.h"
31#include "VideoEditorUtils.h"
32
33#include "utils/Log.h"
34#include "utils/Vector.h"
35#include <media/stagefright/MediaSource.h>
36#include <media/stagefright/MediaDebug.h>
37#include <media/stagefright/MediaDefs.h>
38#include <media/stagefright/MetaData.h>
39#include <media/stagefright/OMXClient.h>
40#include <media/stagefright/OMXCodec.h>
41#include "OMX_Video.h"
42
43/********************
44 *   DEFINITIONS    *
45 ********************/
46
47// Encoder color format
48#define VIDEOEDITOR_ENCODER_COLOR_FORMAT OMX_COLOR_FormatYUV420Planar
49
50// Force using hardware encoder
51#define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly
52
53#if !defined(VIDEOEDITOR_FORCECODEC)
54    #error "Cannot force DSI retrieval if codec type is not fixed"
55#endif
56
57/********************
58 *   SOURCE CLASS   *
59 ********************/
60
61namespace android {
62
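// MediaSource implementation fed by the encoder shell: frames pushed in through
// storeBuffer() are queued in a linked list and handed to the OMX encoder from
// read(), which blocks until a buffer is available or EOS has been signalled.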
63struct VideoEditorVideoEncoderSource : public MediaSource {
64    public:
65        static sp<VideoEditorVideoEncoderSource> Create(
66            const sp<MetaData> &format);
67        virtual status_t start(MetaData *params = NULL);
68        virtual status_t stop();
69        virtual sp<MetaData> getFormat();
70        virtual status_t read(MediaBuffer **buffer,
71            const ReadOptions *options = NULL);
72        virtual int32_t storeBuffer(MediaBuffer *buffer);
73
74    protected:
75        virtual ~VideoEditorVideoEncoderSource();
76
77    private:
78        struct MediaBufferChain {
79            MediaBuffer* buffer;
80            MediaBufferChain* nextLink;
81        };
82        enum State {
83            CREATED,
84            STARTED,
85            ERROR
86        };
87        VideoEditorVideoEncoderSource(const sp<MetaData> &format);
88
89        // Don't call me
90        VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &);
91        VideoEditorVideoEncoderSource &operator=(
92                const VideoEditorVideoEncoderSource &);
93
94        MediaBufferChain* mFirstBufferLink;
95        MediaBufferChain* mLastBufferLink;
96        int32_t           mNbBuffer;
97        bool              mIsEOS;
98        State             mState;
99        sp<MetaData>      mEncFormat;
100        Mutex             mLock;
101        Condition         mBufferCond;
102};
103
104sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create(
105    const sp<MetaData> &format) {
106
107    sp<VideoEditorVideoEncoderSource> aSource =
108        new VideoEditorVideoEncoderSource(format);
109    return aSource;
110}
111
112VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource(
113    const sp<MetaData> &format):
114        mFirstBufferLink(NULL),
115        mLastBufferLink(NULL),
116        mNbBuffer(0),
117        mIsEOS(false),
118        mState(CREATED),
119        mEncFormat(format) {
120    LOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource");
121}
122
123VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() {
124
125    // Safety cleanup
126    if( STARTED == mState ) {
127        stop();
128    }
129}
130
131status_t VideoEditorVideoEncoderSource::start(MetaData *meta) {
132    status_t err = OK;
133
134    LOGV("VideoEditorVideoEncoderSource::start() begin");
135
136    if( CREATED != mState ) {
137        LOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState);
138        return UNKNOWN_ERROR;
139    }
140    mState = STARTED;
141
142    LOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err);
143    return err;
144}
145
146status_t VideoEditorVideoEncoderSource::stop() {
147    status_t err = OK;
148
149    LOGV("VideoEditorVideoEncoderSource::stop() begin");
150
151    if( STARTED != mState ) {
152        LOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState);
153        return UNKNOWN_ERROR;
154    }
155
156    // Release the buffer chain
157    int32_t i = 0;
158    MediaBufferChain* tmpLink = NULL;
159    while( mFirstBufferLink ) {
160        i++;
161        tmpLink = mFirstBufferLink;
162        mFirstBufferLink = mFirstBufferLink->nextLink;
163        delete tmpLink;
164    }
165    LOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i);
166    mFirstBufferLink = NULL;
167    mLastBufferLink = NULL;
168
169    mState = CREATED;
170
171    LOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err);
172    return err;
173}
174
175sp<MetaData> VideoEditorVideoEncoderSource::getFormat() {
176
177    LOGV("VideoEditorVideoEncoderSource::getFormat");
178    return mEncFormat;
179}
180
181status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer,
182        const ReadOptions *options) {
183    Mutex::Autolock autolock(mLock);
184    MediaSource::ReadOptions readOptions;
185    status_t err = OK;
186    MediaBufferChain* tmpLink = NULL;
187
188    LOGV("VideoEditorVideoEncoderSource::read() begin");
189
190    if ( STARTED != mState ) {
191        LOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState);
192        return UNKNOWN_ERROR;
193    }
194
195    while (mFirstBufferLink == NULL && !mIsEOS) {
196        mBufferCond.wait(mLock);
197    }
198
199    // End of stream?
200    if (mFirstBufferLink == NULL) {
201        *buffer = NULL;
202        LOGV("VideoEditorVideoEncoderSource::read : EOS");
203        return ERROR_END_OF_STREAM;
204    }
205
206    // Get a buffer from the chain
207    *buffer = mFirstBufferLink->buffer;
208    tmpLink = mFirstBufferLink;
209    mFirstBufferLink = mFirstBufferLink->nextLink;
210
211    if ( NULL == mFirstBufferLink ) {
212        mLastBufferLink = NULL;
213    }
214    delete tmpLink;
215    mNbBuffer--;
216
217    LOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err);
218    return err;
219}
220
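// Appends a buffer to the chain and wakes up any reader blocked in read().
// A NULL buffer marks end of stream. Returns the number of buffers queued.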
221int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) {
222    Mutex::Autolock autolock(mLock);
223    status_t err = OK;
224
225    LOGV("VideoEditorVideoEncoderSource::storeBuffer() begin");
226
227    if( NULL == buffer ) {
228        LOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS");
229        mIsEOS = true;
230    } else {
231        MediaBufferChain* newLink = new MediaBufferChain;
232        newLink->buffer = buffer;
233        newLink->nextLink = NULL;
234        if( NULL != mLastBufferLink ) {
235            mLastBufferLink->nextLink = newLink;
236        } else {
237            mFirstBufferLink = newLink;
238        }
239        mLastBufferLink = newLink;
240        mNbBuffer++;
241    }
242    mBufferCond.signal();
243    LOGV("VideoEditorVideoEncoderSource::storeBuffer() end");
244    return mNbBuffer;
245}
246
247/********************
248 *      PULLER      *
249 ********************/
250
251// Pulls media buffers from a MediaSource on a dedicated thread and queues them.
252// The user fetches them via getBufferBlocking()/getBufferNonBlocking() and returns them via putBuffer().
253class VideoEditorVideoEncoderPuller {
254public:
255    VideoEditorVideoEncoderPuller(sp<MediaSource> source);
256    ~VideoEditorVideoEncoderPuller();
257    void start();
258    void stop();
259    MediaBuffer* getBufferBlocking();
260    MediaBuffer* getBufferNonBlocking();
261    void putBuffer(MediaBuffer* buffer);
262private:
263    static int acquireThreadStart(void* arg);
264    void acquireThreadFunc();
265
266    static int releaseThreadStart(void* arg);
267    void releaseThreadFunc();
268
269    sp<MediaSource> mSource;
270    Vector<MediaBuffer*> mBuffers;
271    Vector<MediaBuffer*> mReleaseBuffers;
272
273    Mutex mLock;
274    Condition mUserCond;     // for the user of this class
275    Condition mAcquireCond;  // for the acquire thread
276    Condition mReleaseCond;  // for the release thread
277
278    bool mAskToStart;      // Asks the threads to start
279    bool mAskToStop;       // Asks the threads to stop
280    bool mAcquireStopped;  // The acquire thread has stopped
281    bool mReleaseStopped;  // The release thread has stopped
282};
283
284VideoEditorVideoEncoderPuller::VideoEditorVideoEncoderPuller(
285    sp<MediaSource> source) {
286    mSource = source;
287    mAskToStart = false;
288    mAskToStop = false;
289    mAcquireStopped = false;
290    mReleaseStopped = false;
291    androidCreateThread(acquireThreadStart, this);
292    androidCreateThread(releaseThreadStart, this);
293}
294
295VideoEditorVideoEncoderPuller::~VideoEditorVideoEncoderPuller() {
296    stop();
297}
298
299void VideoEditorVideoEncoderPuller::start() {
300    Mutex::Autolock autolock(mLock);
301    mAskToStart = true;
302    mAcquireCond.signal();
303    mReleaseCond.signal();
304}
305
306void VideoEditorVideoEncoderPuller::stop() {
307    Mutex::Autolock autolock(mLock);
308    mAskToStop = true;
309    mAcquireCond.signal();
310    mReleaseCond.signal();
311    while (!mAcquireStopped || !mReleaseStopped) {
312        mUserCond.wait(mLock);
313    }
314
315    // Release remaining buffers
316    for (size_t i = 0; i < mBuffers.size(); i++) {
317        mBuffers.itemAt(i)->release();
318    }
319
320    for (size_t i = 0; i < mReleaseBuffers.size(); i++) {
321        mReleaseBuffers.itemAt(i)->release();
322    }
323}
324
325MediaBuffer* VideoEditorVideoEncoderPuller::getBufferNonBlocking() {
326    Mutex::Autolock autolock(mLock);
327    if (mBuffers.empty()) {
328        return NULL;
329    } else {
330        MediaBuffer* b = mBuffers.itemAt(0);
331        mBuffers.removeAt(0);
332        return b;
333    }
334}
335
336MediaBuffer* VideoEditorVideoEncoderPuller::getBufferBlocking() {
337    Mutex::Autolock autolock(mLock);
338    while (mBuffers.empty() && !mAcquireStopped) {
339        mUserCond.wait(mLock);
340    }
341
342    if (mBuffers.empty()) {
343        return NULL;
344    } else {
345        MediaBuffer* b = mBuffers.itemAt(0);
346        mBuffers.removeAt(0);
347        return b;
348    }
349}
350
351void VideoEditorVideoEncoderPuller::putBuffer(MediaBuffer* buffer) {
352    Mutex::Autolock autolock(mLock);
353    mReleaseBuffers.push(buffer);
354    mReleaseCond.signal();
355}
356
357int VideoEditorVideoEncoderPuller::acquireThreadStart(void* arg) {
358    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
359    self->acquireThreadFunc();
360    return 0;
361}
362
363int VideoEditorVideoEncoderPuller::releaseThreadStart(void* arg) {
364    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
365    self->releaseThreadFunc();
366    return 0;
367}
368
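// Acquire thread: once start() is called, repeatedly reads from the source and
// queues the returned buffers for the user until stop() is requested or the
// source reports an error / end of stream.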
369void VideoEditorVideoEncoderPuller::acquireThreadFunc() {
370    mLock.lock();
371
372    // Wait for the start signal
373    while (!mAskToStart && !mAskToStop) {
374        mAcquireCond.wait(mLock);
375    }
376
377    // Loop until we are asked to stop, or there is nothing more to read
378    while (!mAskToStop) {
379        MediaBuffer* pBuffer;
380        mLock.unlock();
381        status_t result = mSource->read(&pBuffer, NULL);
382        mLock.lock();
383        if (result != OK) {
384            break;
385        }
386        mBuffers.push(pBuffer);
387        mUserCond.signal();
388    }
389
390    mAcquireStopped = true;
391    mUserCond.signal();
392    mLock.unlock();
393}
394
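// Release thread: releases the buffers handed back through putBuffer() without
// blocking the caller, and exits once stop() is requested and the queue is empty.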
395void VideoEditorVideoEncoderPuller::releaseThreadFunc() {
396    mLock.lock();
397
398    // Wait for the start signal
399    while (!mAskToStart && !mAskToStop) {
400        mReleaseCond.wait(mLock);
401    }
402
403    // Loop until we are asked to stop
404    while (1) {
405        if (mReleaseBuffers.empty()) {
406            if (mAskToStop) {
407                break;
408            } else {
409                mReleaseCond.wait(mLock);
410                continue;
411            }
412        }
413        MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0);
414        mReleaseBuffers.removeAt(0);
415        mLock.unlock();
416        pBuffer->release();
417        mLock.lock();
418    }
419
420    mReleaseStopped = true;
421    mUserCond.signal();
422    mLock.unlock();
423}
424
425/**
426 ******************************************************************************
427 * structure VideoEditorVideoEncoder_Context
428 * @brief    This structure defines the context of the StageFright video encoder
429 *           shell
430 ******************************************************************************
431*/
432typedef enum {
433    CREATED   = 0x1,
434    OPENED    = 0x2,
435    STARTED   = 0x4,
436    BUFFERING = 0x8,
437    READING   = 0x10
438} VideoEditorVideoEncoder_State;
439
440typedef struct {
441    VideoEditorVideoEncoder_State     mState;
442    M4ENCODER_Format                  mFormat;
443    M4WRITER_DataInterface*           mWriterDataInterface;
444    M4VPP_apply_fct*                  mPreProcFunction;
445    M4VPP_Context                     mPreProcContext;
446    M4SYS_AccessUnit*                 mAccessUnit;
447    M4ENCODER_Params*                 mCodecParams;
448    M4ENCODER_Header                  mHeader;
449    H264MCS_ProcessEncodedNALU_fct*   mH264NALUPostProcessFct;
450    M4OSA_Context                     mH264NALUPostProcessCtx;
451    M4OSA_UInt32                      mLastCTS;
452    sp<VideoEditorVideoEncoderSource> mEncoderSource;
453    OMXClient                         mClient;
454    sp<MediaSource>                   mEncoder;
455    OMX_COLOR_FORMATTYPE              mEncoderColorFormat;
456    VideoEditorVideoEncoderPuller*    mPuller;
457
458    uint32_t                          mNbInputFrames;
459    double                            mFirstInputCts;
460    double                            mLastInputCts;
461    uint32_t                          mNbOutputFrames;
462    int64_t                           mFirstOutputCts;
463    int64_t                           mLastOutputCts;
464
465} VideoEditorVideoEncoder_Context;
466
467/********************
468 *      TOOLS       *
469 ********************/
470
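// Retrieves the decoder-specific information (DSI) by building a temporary
// encoder graph: a single dummy frame is pushed to the source, the first output
// buffer (the codec-config buffer) is read back, and its payload is stored in
// mHeader (reformatted with buildAVCCodecSpecificData for H264, copied as-is
// otherwise). The temporary graph is then torn down.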
471M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
472        sp<MetaData> metaData) {
473    M4OSA_ERR err = M4NO_ERROR;
474    VideoEditorVideoEncoder_Context*  pEncoderContext = M4OSA_NULL;
475    status_t result = OK;
476    int32_t nbBuffer = 0;
477    int32_t stride = 0;
478    int32_t height = 0;
479    int32_t framerate = 0;
480    int32_t isCodecConfig = 0;
481    size_t size = 0;
482    uint32_t codecFlags = 0;
483    MediaBuffer* inputBuffer = NULL;
484    MediaBuffer* outputBuffer = NULL;
485    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
486    sp<MediaSource> encoder = NULL;;
487    OMXClient client;
488
489    LOGV("VideoEditorVideoEncoder_getDSI begin");
490    // Input parameters check
491    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
492    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);
493
494    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
495    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
496
497    // Create the encoder source
498    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
499    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);
500
501    // Connect to the OMX client
502    result = client.connect();
503    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
504
505    // Create the OMX codec
506    // VIDEOEDITOR_FORCECODEC MUST be defined here
507    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
508    encoder = OMXCodec::Create(client.interface(), metaData, true,
509        encoderSource, NULL, codecFlags);
510    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);
511
512    /**
513     * Send fake frames and retrieve the DSI
514     */
515    // Send a fake frame to the source
516    metaData->findInt32(kKeyStride,     &stride);
517    metaData->findInt32(kKeyHeight,     &height);
518    metaData->findInt32(kKeySampleRate, &framerate);
519    size = (size_t)(stride*height*3)/2;
520    inputBuffer = new MediaBuffer(size);
521    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
522    nbBuffer = encoderSource->storeBuffer(inputBuffer);
523    encoderSource->storeBuffer(NULL); // Signal EOS
524
525    // Call read once to get the DSI
526    result = encoder->start();
527    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
528    result = encoder->read(&outputBuffer, NULL);
529    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
530    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
531        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);
532
533    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
534    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
535        // For H264, format the DSI
536        result = buildAVCCodecSpecificData(
537            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
538            (size_t*)(&(pEncoderContext->mHeader.Size)),
539            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
540            outputBuffer->range_length(), encoder->getFormat().get());
541        outputBuffer->release();
542        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
543    } else {
544        // For MPEG4, just copy the DSI
545        pEncoderContext->mHeader.Size =
546            (M4OSA_UInt32)outputBuffer->range_length();
547        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
548            pEncoderContext->mHeader.Size, "Encoder header");
549        memcpy((void *)pEncoderContext->mHeader.pBuf,
550            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
551            pEncoderContext->mHeader.Size);
552        outputBuffer->release();
553    }
554
555    result = encoder->stop();
556    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
557
558cleanUp:
559    // Destroy the graph
560    if ( encoder != NULL ) { encoder.clear(); }
561    client.disconnect();
562    if ( encoderSource != NULL ) { encoderSource.clear(); }
563    if ( M4NO_ERROR == err ) {
564        LOGV("VideoEditorVideoEncoder_getDSI no error");
565    } else {
566        LOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
567    }
568    LOGV("VideoEditorVideoEncoder_getDSI end");
569    return err;
570}
571/********************
572 * ENGINE INTERFACE *
573 ********************/
574
575M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) {
576    M4OSA_ERR err = M4NO_ERROR;
577    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
578
579    LOGV("VideoEditorVideoEncoder_cleanup begin");
580    // Input parameters check
581    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
582
583    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
584    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
585
586    // Release memory
587    SAFE_FREE(pEncoderContext->mHeader.pBuf);
588    SAFE_FREE(pEncoderContext);
589    pContext = M4OSA_NULL;
590
591cleanUp:
592    if ( M4NO_ERROR == err ) {
593        LOGV("VideoEditorVideoEncoder_cleanup no error");
594    } else {
595        LOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err);
596    }
597    LOGV("VideoEditorVideoEncoder_cleanup end");
598    return err;
599}
600
601M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format,
602        M4ENCODER_Context* pContext,
603        M4WRITER_DataInterface* pWriterDataInterface,
604        M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt,
605        M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) {
606
607    M4OSA_ERR err = M4NO_ERROR;
608    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
609
610    LOGV("VideoEditorVideoEncoder_init begin: format  %d", format);
611    // Input parameters check
612    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
613    VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER);
614    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER);
615    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER);
616
617    // Context allocation & initialization
618    SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1,
619        "VideoEditorVideoEncoder");
620    pEncoderContext->mState = CREATED;
621    pEncoderContext->mFormat = format;
622    pEncoderContext->mWriterDataInterface = pWriterDataInterface;
623    pEncoderContext->mPreProcFunction = pVPPfct;
624    pEncoderContext->mPreProcContext = pVPPctxt;
625    pEncoderContext->mPuller = NULL;
626
627    *pContext = pEncoderContext;
628
629cleanUp:
630    if ( M4NO_ERROR == err ) {
631        LOGV("VideoEditorVideoEncoder_init no error");
632    } else {
633        VideoEditorVideoEncoder_cleanup(pEncoderContext);
634        *pContext = M4OSA_NULL;
635        LOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err);
636    }
637    LOGV("VideoEditorVideoEncoder_init end");
638    return err;
639}
640
641M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext,
642        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
643        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
644        {
645
646    return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext,
647        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
648}
649
650
651M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext,
652        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
653        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
654        {
655
656    return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext,
657        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
658}
659
660
661M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext,
662        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
663        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
664        {
665
666    return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext,
667        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
668}
669
670M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) {
671    M4OSA_ERR err = M4NO_ERROR;
672    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
673
674    LOGV("VideoEditorVideoEncoder_close begin");
675    // Input parameters check
676    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
677
678    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
679    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);
680
681    // Release memory
682    SAFE_FREE(pEncoderContext->mCodecParams);
683
684    // Destroy the graph
685    pEncoderContext->mEncoder.clear();
686    pEncoderContext->mClient.disconnect();
687    pEncoderContext->mEncoderSource.clear();
688
689    delete pEncoderContext->mPuller;
690    pEncoderContext->mPuller = NULL;
691
692    // Set the new state
693    pEncoderContext->mState = CREATED;
694
695cleanUp:
696    if( M4NO_ERROR == err ) {
697        LOGV("VideoEditorVideoEncoder_close no error");
698    } else {
699        LOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
700    }
701    LOGV("VideoEditorVideoEncoder_close end");
702    return err;
703}
704
705
706M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext,
707        M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) {
708    M4OSA_ERR err = M4NO_ERROR;
709    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
710    M4ENCODER_Params* pCodecParams = M4OSA_NULL;
711    status_t result = OK;
712    sp<MetaData> encoderMetadata = NULL;
713    const char* mime = NULL;
714    int32_t iProfile = 0;
715    int32_t iFrameRate = 0;
716    uint32_t codecFlags = 0;
717
718    LOGV(">>> VideoEditorVideoEncoder_open begin");
719    // Input parameters check
720    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
721    VIDEOEDITOR_CHECK(M4OSA_NULL != pAU,      M4ERR_PARAMETER);
722    VIDEOEDITOR_CHECK(M4OSA_NULL != pParams,  M4ERR_PARAMETER);
723
724    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
725    pCodecParams = (M4ENCODER_Params*)pParams;
726    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);
727
728    // Context initialization
729    pEncoderContext->mAccessUnit = pAU;
730
731    // Allocate & initialize the encoding parameters
732    SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1,
733        "VideoEditorVideoEncoder");
734
735
736    pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat;
737    pEncoderContext->mCodecParams->InputFrameWidth =
738        pCodecParams->InputFrameWidth;
739    pEncoderContext->mCodecParams->InputFrameHeight =
740        pCodecParams->InputFrameHeight;
741    pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth;
742    pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight;
743    pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate;
744    pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate;
745    pEncoderContext->mCodecParams->Format = pCodecParams->Format;
746
747    // Check output format consistency and resolution
748    VIDEOEDITOR_CHECK(
749        pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat,
750        M4ERR_PARAMETER);
751    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth  % 16,
752        M4ERR_PARAMETER);
753    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16,
754        M4ERR_PARAMETER);
755
756    /**
757     * StageFright graph building
758     */
759
760    // Create the meta data for the encoder
761    encoderMetadata = new MetaData;
762    switch( pEncoderContext->mCodecParams->Format ) {
763        case M4ENCODER_kH263:
764            mime     = MEDIA_MIMETYPE_VIDEO_H263;
765            iProfile = OMX_VIDEO_H263ProfileBaseline;
766            break;
767        case M4ENCODER_kMPEG4:
768            mime     = MEDIA_MIMETYPE_VIDEO_MPEG4;
769            iProfile = OMX_VIDEO_MPEG4ProfileSimple;
770            break;
771        case M4ENCODER_kH264:
772            mime     = MEDIA_MIMETYPE_VIDEO_AVC;
773            iProfile = OMX_VIDEO_AVCProfileBaseline;
774            break;
775        default:
776            VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format",
777                M4ERR_PARAMETER);
778            break;
779    }
780    encoderMetadata->setCString(kKeyMIMEType, mime);
781    encoderMetadata->setInt32(kKeyVideoProfile, iProfile);
782    encoderMetadata->setInt32(kKeyWidth,
783        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
784    encoderMetadata->setInt32(kKeyStride,
785        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
786    encoderMetadata->setInt32(kKeyHeight,
787        (int32_t)pEncoderContext->mCodecParams->FrameHeight);
788    encoderMetadata->setInt32(kKeySliceHeight,
789        (int32_t)pEncoderContext->mCodecParams->FrameHeight);
790
791    switch( pEncoderContext->mCodecParams->FrameRate ) {
792        case M4ENCODER_k5_FPS:    iFrameRate = 5;  break;
793        case M4ENCODER_k7_5_FPS:  iFrameRate = 8;  break;
794        case M4ENCODER_k10_FPS:   iFrameRate = 10; break;
795        case M4ENCODER_k12_5_FPS: iFrameRate = 13; break;
796        case M4ENCODER_k15_FPS:   iFrameRate = 15; break;
797        case M4ENCODER_k20_FPS:   iFrameRate = 20; break;
798        case M4ENCODER_k25_FPS:   iFrameRate = 25; break;
799        case M4ENCODER_k30_FPS:   iFrameRate = 30; break;
800        case M4ENCODER_kVARIABLE_FPS:
801            iFrameRate = 30;
802            LOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30");
803            break;
804        case M4ENCODER_kUSE_TIMESCALE:
805            iFrameRate = 30;
806            LOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE: set to 30");
807            break;
808
809        default:
810            VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate",
811                M4ERR_STATE);
812            break;
813    }
814    encoderMetadata->setInt32(kKeyFrameRate, iFrameRate);
815    encoderMetadata->setInt32(kKeyBitRate,
816        (int32_t)pEncoderContext->mCodecParams->Bitrate);
817    encoderMetadata->setInt32(kKeyIFramesInterval, 1);
818
819    pEncoderContext->mEncoderColorFormat = VIDEOEDITOR_ENCODER_COLOR_FORMAT;
820    encoderMetadata->setInt32(kKeyColorFormat,
821        pEncoderContext->mEncoderColorFormat);
822
823    // Get the encoder DSI
824    err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata);
825    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
826
827    // Create the encoder source
828    pEncoderContext->mEncoderSource = VideoEditorVideoEncoderSource::Create(
829        encoderMetadata);
830    VIDEOEDITOR_CHECK(
831        NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE);
832
833    // Connect to the OMX client
834    result = pEncoderContext->mClient.connect();
835    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
836
837    // Create the OMX codec
838#ifdef VIDEOEDITOR_FORCECODEC
839    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
840#endif /* VIDEOEDITOR_FORCECODEC */
841    pEncoderContext->mEncoder = OMXCodec::Create(
842        pEncoderContext->mClient.interface(), encoderMetadata, true,
843        pEncoderContext->mEncoderSource, NULL, codecFlags);
844    VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE);
845    LOGV("VideoEditorVideoEncoder_open : DONE");
846    pEncoderContext->mPuller = new VideoEditorVideoEncoderPuller(
847        pEncoderContext->mEncoder);
848
849    // Set the new state
850    pEncoderContext->mState = OPENED;
851
852cleanUp:
853    if( M4NO_ERROR == err ) {
854        LOGV("VideoEditorVideoEncoder_open no error");
855    } else {
856        VideoEditorVideoEncoder_close(pEncoderContext);
857        LOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err);
858    }
859    LOGV("VideoEditorVideoEncoder_open end");
860    return err;
861}
862
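// Prepares one input frame for the encoder: allocates a YUV420 MediaBuffer,
// lets the pre-processing callback fill the three planes, interleaves U/V when
// the encoder expects semi-planar data, time-stamps the buffer with Cts (ms to
// us) and pushes it to the encoder source. A NULL buffer is pushed instead when
// bReachedEOS is true, which signals end of stream.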
863M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer(
864        M4ENCODER_Context pContext, M4OSA_Double Cts,
865        M4OSA_Bool bReachedEOS) {
866    M4OSA_ERR err = M4NO_ERROR;
867    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
868    M4VIFI_ImagePlane pOutPlane[3];
869    MediaBuffer* buffer = NULL;
870    int32_t nbBuffer = 0;
871
872    LOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts  %f", Cts);
873    // Input parameters check
874    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
875
876    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
877    pOutPlane[0].pac_data = M4OSA_NULL;
878    pOutPlane[1].pac_data = M4OSA_NULL;
879    pOutPlane[2].pac_data = M4OSA_NULL;
880
881    if ( M4OSA_FALSE == bReachedEOS ) {
882        M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth *
883            pEncoderContext->mCodecParams->FrameHeight;
884        M4OSA_UInt32 sizeU = sizeY >> 2;
885        M4OSA_UInt32 size  = sizeY + 2*sizeU;
886        M4OSA_UInt8* pData = M4OSA_NULL;
887        buffer = new MediaBuffer((size_t)size);
888        pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset();
889
890        // Prepare the output image for pre-processing
891        pOutPlane[0].u_width   = pEncoderContext->mCodecParams->FrameWidth;
892        pOutPlane[0].u_height  = pEncoderContext->mCodecParams->FrameHeight;
893        pOutPlane[0].u_topleft = 0;
894        pOutPlane[0].u_stride  = pOutPlane[0].u_width;
895        pOutPlane[1].u_width   = pOutPlane[0].u_width/2;
896        pOutPlane[1].u_height  = pOutPlane[0].u_height/2;
897        pOutPlane[1].u_topleft = 0;
898        pOutPlane[1].u_stride  = pOutPlane[0].u_stride/2;
899        pOutPlane[2].u_width   = pOutPlane[1].u_width;
900        pOutPlane[2].u_height  = pOutPlane[1].u_height;
901        pOutPlane[2].u_topleft = 0;
902        pOutPlane[2].u_stride  = pOutPlane[1].u_stride;
903
904        switch( pEncoderContext->mEncoderColorFormat ) {
905            case OMX_COLOR_FormatYUV420Planar:
906                pOutPlane[0].pac_data = pData;
907                pOutPlane[1].pac_data = pData + sizeY;
908                pOutPlane[2].pac_data = pData + sizeY + sizeU;
909            break;
910            case OMX_COLOR_FormatYUV420SemiPlanar:
911                pOutPlane[0].pac_data = pData;
912                SAFE_MALLOC(pOutPlane[1].pac_data, M4VIFI_UInt8,
913                    pOutPlane[1].u_height*pOutPlane[1].u_stride,"OutputPlaneU");
914                SAFE_MALLOC(pOutPlane[2].pac_data, M4VIFI_UInt8,
915                    pOutPlane[2].u_height*pOutPlane[2].u_stride,"OutputPlaneV");
916            break;
917            default:
918                LOGV("VideoEditorVideoEncoder_processInputBuffer : unsupported "
919                    "color format 0x%X", pEncoderContext->mEncoderColorFormat);
920                VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
921            break;
922        }
923
924        // Apply pre-processing
925        err = pEncoderContext->mPreProcFunction(
926            pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane);
927        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
928
929        // Convert to MediaBuffer format if necessary
930        if( OMX_COLOR_FormatYUV420SemiPlanar == \
931                pEncoderContext->mEncoderColorFormat ) {
932            M4OSA_UInt8* pTmpData = M4OSA_NULL;
933            pTmpData = pData + sizeY;
934            // Highly unoptimized copy...
935            for( M4OSA_UInt32 i=0; i<sizeU; i++ ) {
936                *pTmpData = pOutPlane[2].pac_data[i]; pTmpData++;
937                *pTmpData = pOutPlane[1].pac_data[i]; pTmpData++;
938            }
939        }
940
941        // Set the metadata
942        buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000));
943    }
944
945    // Push the buffer to the source; a NULL buffer notifies the source of EOS
946    nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer);
947
948cleanUp:
949    if ( OMX_COLOR_FormatYUV420SemiPlanar == \
950            pEncoderContext->mEncoderColorFormat ) {
951        // The Y plane was not separately allocated; only free the U and V planes
952        if ( pOutPlane[1].pac_data ) {
953            SAFE_FREE(pOutPlane[1].pac_data);
954        }
955        if ( pOutPlane[2].pac_data ) {
956            SAFE_FREE(pOutPlane[2].pac_data);
957        }
958    }
959    if ( M4NO_ERROR == err ) {
960        LOGV("VideoEditorVideoEncoder_processInputBuffer no error");
961    } else {
962        if( NULL != buffer ) {
963            buffer->release();
964        }
965        LOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err);
966    }
967    LOGV("VideoEditorVideoEncoder_processInputBuffer end");
968    return err;
969}
970
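// Forwards one encoded buffer to the writer: codec-config buffers are only
// logged, regular buffers are checked for a monotonic CTS, stripped of the
// 4-byte H264 start code, optionally run through the H264 NALU post-processing
// callback, and written as an access unit through the writer data interface.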
971M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer(
972        M4ENCODER_Context pContext, MediaBuffer* buffer) {
973    M4OSA_ERR err = M4NO_ERROR;
974    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
975    M4OSA_UInt32 Cts = 0;
976    int32_t i32Tmp = 0;
977    int64_t i64Tmp = 0;
978    status_t result = OK;
979
980    LOGV("VideoEditorVideoEncoder_processOutputBuffer begin");
981    // Input parameters check
982    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
983    VIDEOEDITOR_CHECK(M4OSA_NULL != buffer,   M4ERR_PARAMETER);
984
985    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
986
987    // Process the returned AU
988    if ( 0 == buffer->range_length() ) {
989        // Encoder has no data yet, nothing unusual
990        LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty");
991        goto cleanUp;
992    }
993    VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER);
994    VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER);
995    if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){
996        {   // Display the DSI
997            LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",
998                buffer->range_length());
999            uint8_t* tmp = (uint8_t*)(buffer->data());
1000            for( uint32_t i=0; i<buffer->range_length(); i++ ) {
1001                LOGV("DSI [%d] %.2X", i, tmp[i]);
1002            }
1003        }
1004    } else {
1005        // Check the CTS
1006        VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp),
1007            M4ERR_STATE);
1008
1009        pEncoderContext->mNbOutputFrames++;
1010        if ( 0 > pEncoderContext->mFirstOutputCts ) {
1011            pEncoderContext->mFirstOutputCts = i64Tmp;
1012        }
1013        pEncoderContext->mLastOutputCts = i64Tmp;
1014
1015        Cts = (M4OSA_Int32)(i64Tmp/1000);
1016        LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)",
1017            pEncoderContext->mNbOutputFrames, i64Tmp, Cts,
1018            pEncoderContext->mLastCTS);
1019        if ( Cts < pEncoderContext->mLastCTS ) {
1020            LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going "
1021            "backwards %d < %d", Cts, pEncoderContext->mLastCTS);
1022            goto cleanUp;
1023        }
1024        LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d",
1025            Cts, pEncoderContext->mLastCTS);
1026
1027        // Retrieve the AU container
1028        err = pEncoderContext->mWriterDataInterface->pStartAU(
1029            pEncoderContext->mWriterDataInterface->pWriterContext,
1030            pEncoderContext->mAccessUnit->stream->streamID,
1031            pEncoderContext->mAccessUnit);
1032        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1033
1034        // Format the AU
1035        VIDEOEDITOR_CHECK(
1036            buffer->range_length() <= pEncoderContext->mAccessUnit->size,
1037            M4ERR_PARAMETER);
1038        // Remove H264 AU start code
1039        if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
1040            if (!memcmp((const uint8_t *)buffer->data() + \
1041                    buffer->range_offset(), "\x00\x00\x00\x01", 4) ) {
1042                buffer->set_range(buffer->range_offset() + 4,
1043                    buffer->range_length() - 4);
1044            }
1045        }
1046
1047        if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) &&
1048            (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) {
1049            // H264 trimming case, NALU post processing is needed
1050            M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size;
1051            err = pEncoderContext->mH264NALUPostProcessFct(
1052                pEncoderContext->mH264NALUPostProcessCtx,
1053                (M4OSA_UInt8*)buffer->data()+buffer->range_offset(),
1054                buffer->range_length(),
1055                (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress,
1056                &outputSize);
1057            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1058            pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize;
1059        } else {
1060            // The AU can just be copied
1061            memcpy((void *)pEncoderContext->mAccessUnit->\
1062                dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\
1063                range_offset()), buffer->range_length());
1064            pEncoderContext->mAccessUnit->size =
1065                (M4OSA_UInt32)buffer->range_length();
1066        }
1067
1068        if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){
1069            pEncoderContext->mAccessUnit->attribute = AU_RAP;
1070        } else {
1071            pEncoderContext->mAccessUnit->attribute = AU_P_Frame;
1072        }
1073        pEncoderContext->mLastCTS = Cts;
1074        pEncoderContext->mAccessUnit->CTS = Cts;
1075        pEncoderContext->mAccessUnit->DTS = Cts;
1076
1077        LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d",
1078            pEncoderContext->mAccessUnit->dataAddress,
1079            *pEncoderContext->mAccessUnit->dataAddress,
1080            pEncoderContext->mAccessUnit->size,
1081            pEncoderContext->mAccessUnit->CTS);
1082
1083        // Write the AU
1084        err = pEncoderContext->mWriterDataInterface->pProcessAU(
1085            pEncoderContext->mWriterDataInterface->pWriterContext,
1086            pEncoderContext->mAccessUnit->stream->streamID,
1087            pEncoderContext->mAccessUnit);
1088        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1089    }
1090
1091cleanUp:
1092    if( M4NO_ERROR == err ) {
1093        LOGV("VideoEditorVideoEncoder_processOutputBuffer no error");
1094    } else {
1095        SAFE_FREE(pEncoderContext->mHeader.pBuf);
1096        pEncoderContext->mHeader.Size = 0;
1097        LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err);
1098    }
1099    LOGV("VideoEditorVideoEncoder_processOutputBuffer end");
1100    return err;
1101}
1102
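// Encodes one frame: pushes the input frame (plus an EOS marker when FrameMode
// is M4ENCODER_kLastFrame) to the encoder source, then drains, without blocking,
// whatever access units the encoder has produced so far and hands them to the
// writer.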
1103M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext,
1104        M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts,
1105        M4ENCODER_FrameMode FrameMode) {
1106    M4OSA_ERR err = M4NO_ERROR;
1107    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1108    status_t result = OK;
1109    MediaBuffer* outputBuffer = NULL;
1110
1111    LOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode);
1112    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1113
1114    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1115    if ( STARTED == pEncoderContext->mState ) {
1116        pEncoderContext->mState = BUFFERING;
1117    }
1118    VIDEOEDITOR_CHECK(
1119        (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE);
1120
1121    pEncoderContext->mNbInputFrames++;
1122    if ( 0 > pEncoderContext->mFirstInputCts ) {
1123        pEncoderContext->mFirstInputCts = Cts;
1124    }
1125    pEncoderContext->mLastInputCts = Cts;
1126
1127    LOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode,
1128        Cts, pEncoderContext->mLastCTS);
1129
1130    // Push the input buffer to the encoder source
1131    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts,
1132        M4OSA_FALSE);
1133    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1134
1135    // Notify the source in case of EOS
1136    if ( M4ENCODER_kLastFrame == FrameMode ) {
1137        err = VideoEditorVideoEncoder_processInputBuffer(
1138            pEncoderContext, 0, M4OSA_TRUE);
1139        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1140    }
1141
1142    if ( BUFFERING == pEncoderContext->mState ) {
1143        // Prefetch is complete, start reading
1144        pEncoderContext->mState = READING;
1145    }
1146    // Read
1147    while (1)  {
1148        MediaBuffer *outputBuffer =
1149                pEncoderContext->mPuller->getBufferNonBlocking();
1150
1151        if (outputBuffer == NULL) break;
1152
1153        // Provide the encoded AU to the writer
1154        err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext,
1155            outputBuffer);
1156        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1157
1158        pEncoderContext->mPuller->putBuffer(outputBuffer);
1159    }
1160
1161cleanUp:
1162    if( M4NO_ERROR == err ) {
1163        LOGV("VideoEditorVideoEncoder_encode no error");
1164    } else {
1165        LOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err);
1166    }
1167    LOGV("VideoEditorVideoEncoder_encode end");
1168    return err;
1169}
1170
1171M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) {
1172    M4OSA_ERR                  err             = M4NO_ERROR;
1173    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1174    status_t                   result          = OK;
1175
1176    LOGV("VideoEditorVideoEncoder_start begin");
1177    // Input parameters check
1178    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1179
1180    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1181    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);
1182
1183    pEncoderContext->mNbInputFrames  = 0;
1184    pEncoderContext->mFirstInputCts  = -1.0;
1185    pEncoderContext->mLastInputCts   = -1.0;
1186    pEncoderContext->mNbOutputFrames = 0;
1187    pEncoderContext->mFirstOutputCts = -1;
1188    pEncoderContext->mLastOutputCts  = -1;
1189
1190    result = pEncoderContext->mEncoder->start();
1191    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
1192
1193    pEncoderContext->mPuller->start();
1194
1195    // Set the new state
1196    pEncoderContext->mState = STARTED;
1197
1198cleanUp:
1199    if ( M4NO_ERROR == err ) {
1200        LOGV("VideoEditorVideoEncoder_start no error");
1201    } else {
1202        LOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err);
1203    }
1204    LOGV("VideoEditorVideoEncoder_start end");
1205    return err;
1206}
1207
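// Stops encoding: signals EOS to the source, drains the remaining access units
// with blocking reads, then stops the puller and the OMX encoder, warning if
// some input frames were never encoded.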
1208M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) {
1209    M4OSA_ERR err = M4NO_ERROR;
1210    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1211    MediaBuffer* outputBuffer = NULL;
1212    status_t result = OK;
1213
1214    LOGV("VideoEditorVideoEncoder_stop begin");
1215    // Input parameters check
1216    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1217    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1218
1219    // Send EOS again to make sure the source doesn't block.
1220    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0,
1221        M4OSA_TRUE);
1222    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1223
1224    // Process the remaining buffers if necessary
1225    if ( (BUFFERING | READING) & pEncoderContext->mState ) {
1226        while (1)  {
1227            MediaBuffer *outputBuffer =
1228                pEncoderContext->mPuller->getBufferBlocking();
1229
1230            if (outputBuffer == NULL) break;
1231
1232            err = VideoEditorVideoEncoder_processOutputBuffer(
1233                pEncoderContext, outputBuffer);
1234            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
1235
1236            pEncoderContext->mPuller->putBuffer(outputBuffer);
1237        }
1238
1239        pEncoderContext->mState = STARTED;
1240    }
1241
1242    // Stop the graph module if necessary
1243    if ( STARTED == pEncoderContext->mState ) {
1244        pEncoderContext->mPuller->stop();
1245        pEncoderContext->mEncoder->stop();
1246        pEncoderContext->mState = OPENED;
1247    }
1248
1249    if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) {
1250        LOGW("Some frames were not encoded: input(%d) != output(%d)",
1251            pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
1252    }
1253
1254cleanUp:
1255    if ( M4NO_ERROR == err ) {
1256        LOGV("VideoEditorVideoEncoder_stop no error");
1257    } else {
1258        LOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err);
1259    }
1260    LOGV("VideoEditorVideoEncoder_stop end");
1261    return err;
1262}
1263
1264M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) {
1265    LOGW("regulBitRate is not implemented");
1266    return M4NO_ERROR;
1267}
1268
1269M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext,
1270        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
1271    M4OSA_ERR err = M4NO_ERROR;
1272    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1273
1274    LOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID);
1275    // Input parameters check
1276    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1277
1278    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1279
1280    switch( optionID ) {
1281        case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr:
1282            pEncoderContext->mH264NALUPostProcessFct =
1283                (H264MCS_ProcessEncodedNALU_fct*)optionValue;
1284            break;
1285        case M4ENCODER_kOptionID_H264ProcessNALUContext:
1286            pEncoderContext->mH264NALUPostProcessCtx =
1287                (M4OSA_Context)optionValue;
1288            break;
1289        default:
1290            LOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X",
1291                optionID);
1292            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
1293            break;
1294    }
1295
1296cleanUp:
1297    if ( M4NO_ERROR == err ) {
1298        LOGV("VideoEditorVideoEncoder_setOption no error");
1299    } else {
1300        LOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err);
1301    }
1302    LOGV("VideoEditorVideoEncoder_setOption end");
1303    return err;
1304}
1305
1306M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext,
1307        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
1308    M4OSA_ERR err = M4NO_ERROR;
1309    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
1310
1311    LOGV("VideoEditorVideoEncoder_getOption begin optionId 0x%X", optionID);
1312    // Input parameters check
1313    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
1314    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
1315
1316    switch( optionID ) {
1317        case M4ENCODER_kOptionID_EncoderHeader:
1318            VIDEOEDITOR_CHECK(
1319                    M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE);
1320            *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader);
1321            break;
1322        default:
1323            LOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X",
1324                optionID);
1325            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
1326            break;
1327    }
1328
1329cleanUp:
1330    if ( M4NO_ERROR == err ) {
1331        LOGV("VideoEditorVideoEncoder_getOption no error");
1332    } else {
1333        LOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err);
1334    }
1335    return err;
1336}
1337
1338M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format,
1339        M4ENCODER_Format* pFormat,
1340        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1341    M4OSA_ERR err = M4NO_ERROR;
1342
1343    // Input parameters check
1344    VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat,           M4ERR_PARAMETER);
1345    VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER);
1346
1347    LOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat,
1348        pEncoderInterface, mode);
1349
1350    SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1,
1351        "VideoEditorVideoEncoder");
1352
1353    *pFormat = format;
1354
1355    switch( format ) {
1356        case M4ENCODER_kH263:
1357            {
1358                (*pEncoderInterface)->pFctInit =
1359                    VideoEditorVideoEncoder_init_H263;
1360                break;
1361            }
1362        case M4ENCODER_kMPEG4:
1363            {
1364                (*pEncoderInterface)->pFctInit =
1365                    VideoEditorVideoEncoder_init_MPEG4;
1366                break;
1367            }
1368        case M4ENCODER_kH264:
1369            {
1370                (*pEncoderInterface)->pFctInit =
1371                    VideoEditorVideoEncoder_init_H264;
1372                break;
1373            }
1374        default:
1375            LOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d",
1376                format);
1377            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
1378        break;
1379    }
1380    (*pEncoderInterface)->pFctOpen         = VideoEditorVideoEncoder_open;
1381    (*pEncoderInterface)->pFctStart        = VideoEditorVideoEncoder_start;
1382    (*pEncoderInterface)->pFctStop         = VideoEditorVideoEncoder_stop;
1383    (*pEncoderInterface)->pFctPause        = M4OSA_NULL;
1384    (*pEncoderInterface)->pFctResume       = M4OSA_NULL;
1385    (*pEncoderInterface)->pFctClose        = VideoEditorVideoEncoder_close;
1386    (*pEncoderInterface)->pFctCleanup      = VideoEditorVideoEncoder_cleanup;
1387    (*pEncoderInterface)->pFctRegulBitRate =
1388        VideoEditorVideoEncoder_regulBitRate;
1389    (*pEncoderInterface)->pFctEncode       = VideoEditorVideoEncoder_encode;
1390    (*pEncoderInterface)->pFctSetOption    = VideoEditorVideoEncoder_setOption;
1391    (*pEncoderInterface)->pFctGetOption    = VideoEditorVideoEncoder_getOption;
1392
1393cleanUp:
1394    if( M4NO_ERROR == err ) {
1395        LOGV("VideoEditorVideoEncoder_getInterface no error");
1396    } else {
1397        *pEncoderInterface = M4OSA_NULL;
1398        LOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err);
1399    }
1400    return err;
1401}
1402
1403extern "C" {
1404
1405M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat,
1406        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1407    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat,
1408            pEncoderInterface, mode);
1409}
1410
1411M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat,
1412        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1413    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat,
1414           pEncoderInterface, mode);
1415}
1416
1417M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat,
1418        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
1419    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat,
1420           pEncoderInterface, mode);
1421
1422}
1423
1424}  // extern "C"
1425
1426}  // namespace android
1427