SoftAVCEncoder.cpp revision 23da4cf305b9bfff07954711a8a2d9ec040865af
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>

#include "avcenc_api.h"
#include "avcenc_int.h"
#include "OMX_Video.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

#include "SoftAVCEncoder.h"

namespace android {

template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

typedef struct LevelConversion {
    OMX_U32 omxLevel;
    AVCLevel avcLevel;
} LevelConversion;

static LevelConversion ConversionTable[] = {
    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B },
    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1   },
    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
#if 0
    // encoding speed is very poor if video
    // resolution is higher than CIF
    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
#endif
};

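// Translate between OMX AVC level constants and the PV encoder's AVCLevel
// enum, limited to the entries present in ConversionTable above.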
static status_t ConvertOmxAvcLevelToAvcSpecLevel(
        OMX_U32 omxLevel, AVCLevel *avcLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (omxLevel == ConversionTable[i].omxLevel) {
            *avcLevel = ConversionTable[i].avcLevel;
            return OK;
        }
    }

    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
            (int32_t)omxLevel);

    return BAD_VALUE;
}

static status_t ConvertAvcSpecLevelToOmxAvcLevel(
    AVCLevel avcLevel, OMX_U32 *omxLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (avcLevel == ConversionTable[i].avcLevel) {
            *omxLevel = ConversionTable[i].omxLevel;
            return OK;
        }
    }

    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
            (int32_t) avcLevel);

    return BAD_VALUE;
}

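// Convert an interleaved-chroma (semi-planar) YUV420 frame into the planar
// layout the PV encoder expects: copy the Y plane as-is, then split each pair
// of interleaved chroma bytes into separate planes, handling two chroma pairs
// per 32-bit read.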
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint32_t *outy =  (uint32_t *) outyuv;
    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outy, inyuv, outYsize);

    /* U & V copying */
    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inyuv_4++;
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            // Flip U and V
            *outcb++ = tempV;
            *outcr++ = tempU;
        }
    }
}

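// Thin C callbacks handed to the PV encoder core. The encoder routes memory
// allocation, DPB (reference frame) allocation and frame bind/unbind requests
// back into this component through the userData pointer.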
static void* MallocWrapper(
        void *userData, int32_t size, int32_t attrs) {
    return malloc(size);
}

static void FreeWrapper(void *userData, void* ptr) {
    free(ptr);
}

static int32_t DpbAllocWrapper(void *userData,
        unsigned int sizeInMbs, unsigned int numBuffers) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
}

static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->bindOutputBuffer(index, yuv);
}

static void UnbindFrameWrapper(void *userData, int32_t index) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->unbindOutputBuffer(index);
}

SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mVideoWidth(176),
      mVideoHeight(144),
      mVideoFrameRate(30),
      mVideoBitRate(192000),
      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
      mIDRFrameRefreshIntervalInSec(1),
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),
      mEncParams(new tagAVCEncParam),
      mInputFrameData(NULL),
      mSliceGroup(NULL) {

    initPorts();
    ALOGI("Construct SoftAVCEncoder");
}

SoftAVCEncoder::~SoftAVCEncoder() {
    ALOGV("Destruct SoftAVCEncoder");
    releaseEncoder();
    List<BufferInfo *> &outQueue = getPortQueue(1);
    List<BufferInfo *> &inQueue = getPortQueue(0);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());
}

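// Fill mEncParams with the fixed baseline-profile defaults, then derive the
// size-, bitrate- and IDR-interval-dependent fields from the current port
// configuration.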
OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(tagAVCEncParam));
    mEncParams->rate_control = AVC_ON;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        // Color conversion is needed.
        CHECK(mInputFrameData == NULL);
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's AVC encoder requires the video dimensions to be a multiple of 16.
    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return OMX_ErrorBadParameter;
    }

    mEncParams->width = mVideoWidth;
    mEncParams->height = mVideoHeight;
    mEncParams->bitrate = mVideoBitRate;
    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // Scaled by 1000 to match the ms timestamp timebase
    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);

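    // Build the macroblock-to-slice-group map; with num_slice_group == 1
    // every macroblock is assigned to slice group 0.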
    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
    CHECK(mSliceGroup == NULL);
    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    CHECK(mSliceGroup != NULL);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        mSliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = mSliceGroup;

    // Set IDR frame refresh interval
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->idr_period = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->idr_period = 1;  // All I frames
    } else {
        mEncParams->idr_period =
            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
    }

    // Set profile and level
    mEncParams->profile = mAVCEncProfile;
    mEncParams->level = mAVCEncLevel;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
    CHECK(!mStarted);

    OMX_ERRORTYPE errType = OMX_ErrorNone;
    if (OMX_ErrorNone != (errType = initEncParams())) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return errType;
    }

    AVCEnc_Status err;
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
    if (err != AVCENC_SUCCESS) {
        ALOGE("Failed to initialize the encoder: %d", err);
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return OMX_ErrorUndefined;
    }

    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = false;
    mStarted = true;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    if (!mStarted) {
        return OMX_ErrorNone;
    }

    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    free(mInputFrameData);  // allocated with malloc(), so release with free()
    mInputFrameData = NULL;

    free(mSliceGroup);  // allocated with malloc(), so release with free()
    mSliceGroup = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    return OMX_ErrorNone;
}

void SoftAVCEncoder::releaseOutputBuffers() {
    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
        buffer->setObserver(NULL);
        buffer->release();
    }
    mOutputBuffers.clear();
}

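// Describe the two ports: raw YUV420 input on port 0 and an AVC (H.264)
// elementary stream output on port 1.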
364    OMX_PARAM_PORTDEFINITIONTYPE def;
365    InitOMXParams(&def);
366
367    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
368
369    // 31584 is PV's magic number.  Not sure why.
370    const size_t kOutputBufferSize =
371            (kInputBufferSize > 31584) ? kInputBufferSize: 31584;
372
373    def.nPortIndex = 0;
374    def.eDir = OMX_DirInput;
375    def.nBufferCountMin = kNumBuffers;
376    def.nBufferCountActual = def.nBufferCountMin;
377    def.nBufferSize = kInputBufferSize;
378    def.bEnabled = OMX_TRUE;
379    def.bPopulated = OMX_FALSE;
380    def.eDomain = OMX_PortDomainVideo;
381    def.bBuffersContiguous = OMX_FALSE;
382    def.nBufferAlignment = 1;
383
384    def.format.video.cMIMEType = const_cast<char *>("video/raw");
385    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
386    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
387    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
388    def.format.video.nBitrate = mVideoBitRate;
389    def.format.video.nFrameWidth = mVideoWidth;
390    def.format.video.nFrameHeight = mVideoHeight;
391    def.format.video.nStride = mVideoWidth;
392    def.format.video.nSliceHeight = mVideoHeight;
393
394    addPort(def);
395
396    def.nPortIndex = 1;
397    def.eDir = OMX_DirOutput;
398    def.nBufferCountMin = kNumBuffers;
399    def.nBufferCountActual = def.nBufferCountMin;
400    def.nBufferSize = kOutputBufferSize;
401    def.bEnabled = OMX_TRUE;
402    def.bPopulated = OMX_FALSE;
403    def.eDomain = OMX_PortDomainVideo;
404    def.bBuffersContiguous = OMX_FALSE;
405    def.nBufferAlignment = 2;
406
407    def.format.video.cMIMEType = const_cast<char *>("video/avc");
408    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
409    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
410    def.format.video.xFramerate = (0 << 16);  // Q16 format
411    def.format.video.nBitrate = mVideoBitRate;
412    def.format.video.nFrameWidth = mVideoWidth;
413    def.format.video.nFrameHeight = mVideoHeight;
414    def.format.video.nStride = mVideoWidth;
415    def.format.video.nSliceHeight = mVideoHeight;
416
417    addPort(def);
418}
419
OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mVideoBitRate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 1) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                if (formatParams->nIndex == 0) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                } else {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
                }
            } else {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;

            if (profileLevel->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            const size_t size =
                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);

            if (profileLevel->nProfileIndex >= size) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1 ||
                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
                return OMX_ErrorUndefined;
            }

            mVideoBitRate = bitRate->nTargetBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_PARAM_PORTDEFINITIONTYPE *def =
                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
            if (def->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (def->nPortIndex == 0) {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar)) {
                    return OMX_ErrorUndefined;
                }
            } else {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
                    return OMX_ErrorUndefined;
                }
            }

            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
            if (OMX_ErrorNone != err) {
                return err;
            }

            if (def->nPortIndex == 0) {
                mVideoWidth = def->format.video.nFrameWidth;
                mVideoHeight = def->format.video.nFrameHeight;
                mVideoFrameRate = def->format.video.xFramerate >> 16;
                mVideoColorFormat = def->format.video.eColorFormat;
            } else {
                mVideoBitRate = def->format.video.nBitrate;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (strncmp((const char *)roleParams->cRole,
                        "video_encoder.avc",
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 1) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    ((formatParams->nIndex == 0 &&
                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
                    (formatParams->nIndex == 1 &&
                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar))) {
                    return OMX_ErrorUndefined;
                }
                mVideoColorFormat = formatParams->eColorFormat;
            } else {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
                    return OMX_ErrorUndefined;
                }
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcType =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcType->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // PV's AVC encoder only supports baseline profile
            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
                avcType->nRefFrames != 1 ||
                avcType->nBFrames != 0 ||
                avcType->bUseHadamard != OMX_TRUE ||
                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
                avcType->nRefIdx10ActiveMinus1 != 0 ||
                avcType->nRefIdx11ActiveMinus1 != 0 ||
                avcType->bWeightedPPrediction != OMX_FALSE ||
                avcType->bEntropyCodingCABAC != OMX_FALSE ||
                avcType->bconstIpred != OMX_FALSE ||
                avcType->bDirect8x8Inference != OMX_FALSE ||
                avcType->bDirectSpatialTemporal != OMX_FALSE ||
                avcType->nCabacInitIdc != 0) {
                return OMX_ErrorUndefined;
            }

            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}

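// Main work loop: drains the input and output port queues. The very first
// output buffer carries the combined SPS/PPS marked as codec config; after
// that, each pass encodes one queued input frame into one output buffer.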
void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
    if (mSignalledError || mSawInputEOS) {
        return;
    }

    if (!mStarted) {
        if (OMX_ErrorNone != initEncoder()) {
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
        outHeader->nTimeStamp = 0;
        outHeader->nFlags = 0;
        outHeader->nOffset = 0;
        outHeader->nFilledLen = 0;

        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
        uint32_t dataLength = outHeader->nAllocLen;

        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
            // 4 bytes are reserved for holding the start code 0x00000001
            // of the sequence parameter set at the beginning.
            outPtr += 4;
            dataLength -= 4;
        }

        int32_t type;
        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;

        // Combine SPS and PPS and place them in the very first output buffer
        // SPS and PPS are separated by start code 0x00000001
        // Assume that we have exactly one SPS and exactly one PPS.
        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_WRONG_STATE) {
                mSpsPpsHeaderReceived = true;
                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
                return;
            } else {
                switch (type) {
                    case AVC_NALTYPE_SPS:
                        ++mNumInputFrames;
                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen = 4 + dataLength;
                        outPtr += (dataLength + 4);  // 4 bytes for next start code
                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
                        break;
                    default:
                        CHECK_EQ(AVC_NALTYPE_PPS, type);
                        ++mNumInputFrames;
                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
                                "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen += (dataLength + 4);
                        outPtr += (dataLength + 4);
                        break;
                }
            }
        }

        // Get next input video frame
        if (mReadyForNextFrame) {
            // Save the input buffer info so that it can be
            // passed to an output buffer
            InputBufferInfo info;
            info.mTimeUs = inHeader->nTimeStamp;
            info.mFlags = inHeader->nFlags;
            mInputBufferInfoVec.push(info);
            mPrevTimestampUs = inHeader->nTimeStamp;

            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mSawInputEOS = true;
            }

            if (inHeader->nFilledLen > 0) {
                AVCFrameIO videoInput;
                memset(&videoInput, 0, sizeof(videoInput));
                videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                const void *inData = inHeader->pBuffer + inHeader->nOffset;
                uint8_t *inputData = (uint8_t *) inData;

                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
                    ConvertYUV420SemiPlanarToYUV420Planar(
                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
                    inputData = mInputFrameData;
                }
                CHECK(inputData != NULL);
                videoInput.YCbCr[0] = inputData;
                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                    ((videoInput.height * videoInput.pitch) >> 2);
                videoInput.disp_order = mNumInputFrames;

                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
                    mReadyForNextFrame = false;
                    ++mNumInputFrames;
                    if (encoderStatus == AVCENC_NEW_IDR) {
                        mIsIDRFrame = true;
                    }
                } else {
                    if (encoderStatus < AVCENC_SUCCESS) {
                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    } else {
                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        inQueue.erase(inQueue.begin());
                        inInfo->mOwnedByUs = false;
                        notifyEmptyBufferDone(inHeader);
                        return;
                    }
                }
            }
        }

        // Encode an input video frame
        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
        if (inHeader->nFilledLen > 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_SUCCESS) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
            } else if (encoderStatus == AVCENC_PICTURE_READY) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
                if (mIsIDRFrame) {
                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                    mIsIDRFrame = false;
                }
                mReadyForNextFrame = true;
                AVCFrameIO recon;
                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
                    PVAVCEncReleaseRecon(mHandle, &recon);
                }
            } else {
                dataLength = 0;
                mReadyForNextFrame = true;
            }

            if (encoderStatus < AVCENC_SUCCESS) {
                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                mSignalledError = true;
                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                return;
            }
        } else {
            dataLength = 0;
        }

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
        CHECK(!mInputBufferInfoVec.empty());
        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
        outHeader->nFilledLen = dataLength;
        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);
    }
}

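// DPB allocation callback target. A 16x16 macroblock covers 256 luma pixels,
// so one YUV420 reference frame needs sizeInMbs * 256 * 3 / 2 =
// (sizeInMbs << 7) * 3 bytes.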
int32_t SoftAVCEncoder::allocOutputBuffers(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mOutputBuffers.isEmpty());
    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i <  numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        buffer->setObserver(this);
        mOutputBuffers.push(buffer);
    }

    return 1;
}

void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}

int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t) mOutputBuffers.size());
    *yuv = (uint8_t *) mOutputBuffers[index]->data();

    return 1;
}

void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer);
}

}  // namespace android

android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVCEncoder(name, callbacks, appData, component);
}