// SoftAVCEncoder.cpp revision f547a36c8f0fe11c4c425168037e3ce1d1a697df
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>

#include "avcenc_api.h"
#include "avcenc_int.h"
#include "OMX_Video.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

#include "SoftAVCEncoder.h"

namespace android {

template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

typedef struct LevelConversion {
    OMX_U32 omxLevel;
    AVCLevel avcLevel;
} LevelConversion;

static LevelConversion ConversionTable[] = {
    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B },
    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1   },
    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
#if 0
    // encoding speed is very poor if video
    // resolution is higher than CIF
    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
#endif
};

static status_t ConvertOmxAvcLevelToAvcSpecLevel(
        OMX_U32 omxLevel, AVCLevel *avcLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (omxLevel == ConversionTable[i].omxLevel) {
            *avcLevel = ConversionTable[i].avcLevel;
            return OK;
        }
    }

    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
            (int32_t)omxLevel);

    return BAD_VALUE;
}

static status_t ConvertAvcSpecLevelToOmxAvcLevel(
    AVCLevel avcLevel, OMX_U32 *omxLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (avcLevel == ConversionTable[i].avcLevel) {
            *omxLevel = ConversionTable[i].omxLevel;
            return OK;
        }
    }

    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
            (int32_t) avcLevel);

    return BAD_VALUE;
}

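// The input chroma plane stores Cb/Cr samples interleaved (semi-planar),
// while the PV encoder consumes fully planar I420. The loop below reads four
// chroma bytes at a time as one 32-bit word and de-interleaves them into the
// two separate chroma planes; see the "Flip U and V" note inside the loop for
// the byte ordering it assumes.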
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint32_t *outy =  (uint32_t *) outyuv;
    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outy, inyuv, outYsize);

    /* U & V copying */
    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inyuv_4++;
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            // Flip U and V
            *outcb++ = tempV;
            *outcr++ = tempU;
        }
    }
}

static void* MallocWrapper(
        void *userData, int32_t size, int32_t attrs) {
    void *ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);
    return ptr;
}

static void FreeWrapper(void *userData, void* ptr) {
    free(ptr);
}

static int32_t DpbAllocWrapper(void *userData,
        unsigned int sizeInMbs, unsigned int numBuffers) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
}

static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->bindOutputBuffer(index, yuv);
}

static void UnbindFrameWrapper(void *userData, int32_t index) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->unbindOutputBuffer(index);
}

SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mVideoWidth(176),
      mVideoHeight(144),
      mVideoFrameRate(30),
      mVideoBitRate(192000),
      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
      mIDRFrameRefreshIntervalInSec(1),
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),
      mEncParams(new tagAVCEncParam),
      mInputFrameData(NULL),
      mSliceGroup(NULL) {

    initPorts();
    ALOGI("Construct SoftAVCEncoder");
}

SoftAVCEncoder::~SoftAVCEncoder() {
    ALOGV("Destruct SoftAVCEncoder");
    releaseEncoder();
    List<BufferInfo *> &outQueue = getPortQueue(1);
    List<BufferInfo *> &inQueue = getPortQueue(0);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());
}

OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(tagAVCEncParam));
    mEncParams->rate_control = AVC_ON;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        // Color conversion is needed.
        CHECK(mInputFrameData == NULL);
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's AVC encoder requires the video dimensions to be multiples of 16.
    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return OMX_ErrorBadParameter;
    }

    mEncParams->width = mVideoWidth;
    mEncParams->height = mVideoHeight;
    mEncParams->bitrate = mVideoBitRate;
    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // fps scaled by 1000 (frames per 1000 seconds)
    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);

    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
    CHECK(mSliceGroup == NULL);
    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    CHECK(mSliceGroup != NULL);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        mSliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = mSliceGroup;

    // Set IDR frame refresh interval
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->idr_period = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->idr_period = 1;  // All I frames
    } else {
        mEncParams->idr_period =
            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
    }

    // Set profile and level
    mEncParams->profile = mAVCEncProfile;
    mEncParams->level = mAVCEncLevel;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
    CHECK(!mStarted);

    OMX_ERRORTYPE errType = OMX_ErrorNone;
    if (OMX_ErrorNone != (errType = initEncParams())) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return errType;
    }

    AVCEnc_Status err;
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
    if (err != AVCENC_SUCCESS) {
        ALOGE("Failed to initialize the encoder: %d", err);
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return OMX_ErrorUndefined;
    }

    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = false;
    mStarted = true;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    if (!mStarted) {
        return OMX_ErrorNone;
    }

    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    // mInputFrameData and mSliceGroup were allocated with malloc().
    free(mInputFrameData);
    mInputFrameData = NULL;

    free(mSliceGroup);
    mSliceGroup = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    return OMX_ErrorNone;
}

void SoftAVCEncoder::releaseOutputBuffers() {
    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
        buffer->setObserver(NULL);
        buffer->release();
    }
    mOutputBuffers.clear();
}

void SoftAVCEncoder::initPorts() {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;

    // 31584 is PV's magic number.  Not sure why.
    const size_t kOutputBufferSize =
            (kInputBufferSize > 31584) ? kInputBufferSize : 31584;

    def.nPortIndex = 0;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kInputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);

    def.nPortIndex = 1;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kOutputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>("video/avc");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.xFramerate = (0 << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);
}

OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mVideoBitRate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 1) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                if (formatParams->nIndex == 0) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                } else {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
                }
            } else {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;

            if (profileLevel->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            const size_t size =
                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);

            if (profileLevel->nProfileIndex >= size) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1 ||
                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
                return OMX_ErrorUndefined;
            }

            mVideoBitRate = bitRate->nTargetBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_PARAM_PORTDEFINITIONTYPE *def =
                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
            if (def->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (def->nPortIndex == 0) {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar)) {
                    return OMX_ErrorUndefined;
                }
            } else {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
                    return OMX_ErrorUndefined;
                }
            }

            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
            if (OMX_ErrorNone != err) {
                return err;
            }

            if (def->nPortIndex == 0) {
                mVideoWidth = def->format.video.nFrameWidth;
                mVideoHeight = def->format.video.nFrameHeight;
                mVideoFrameRate = def->format.video.xFramerate >> 16;
                mVideoColorFormat = def->format.video.eColorFormat;
            } else {
                mVideoBitRate = def->format.video.nBitrate;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (strncmp((const char *)roleParams->cRole,
                        "video_encoder.avc",
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 1) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    ((formatParams->nIndex == 0 &&
                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
                    (formatParams->nIndex == 1 &&
                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar))) {
                    return OMX_ErrorUndefined;
                }
                mVideoColorFormat = formatParams->eColorFormat;
            } else {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
                    return OMX_ErrorUndefined;
                }
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcType =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcType->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // PV's AVC encoder only supports baseline profile
            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
                avcType->nRefFrames != 1 ||
                avcType->nBFrames != 0 ||
                avcType->bUseHadamard != OMX_TRUE ||
                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
                avcType->nRefIdx10ActiveMinus1 != 0 ||
                avcType->nRefIdx11ActiveMinus1 != 0 ||
                avcType->bWeightedPPrediction != OMX_FALSE ||
                avcType->bEntropyCodingCABAC != OMX_FALSE ||
                avcType->bconstIpred != OMX_FALSE ||
                avcType->bDirect8x8Inference != OMX_FALSE ||
                avcType->bDirectSpatialTemporal != OMX_FALSE ||
                avcType->nCabacInitIdc != 0) {
                return OMX_ErrorUndefined;
            }

            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}

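// onQueueFilled drives the encode loop: on the first pass the SPS and PPS NAL
// units are packed (each behind a 0x00000001 start code) into one output
// buffer flagged OMX_BUFFERFLAG_CODECCONFIG; after that, each iteration feeds
// one input frame to PVAVCEncSetInput, pulls an encoded NAL out via
// PVAVCEncodeNAL, and returns it downstream carrying the input buffer's
// timestamp and flags (SYNCFRAME for IDR frames, EOS when input ends).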
void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
    if (mSignalledError || mSawInputEOS) {
        return;
    }

    if (!mStarted) {
        if (OMX_ErrorNone != initEncoder()) {
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFlags = 0;
        outHeader->nOffset = 0;
        outHeader->nFilledLen = 0;

        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
        uint32_t dataLength = outHeader->nAllocLen;

        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
            // 4 bytes are reserved for holding the start code 0x00000001
            // of the sequence parameter set at the beginning.
            outPtr += 4;
            dataLength -= 4;
        }

        int32_t type;
        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;

        // Combine SPS and PPS and place them in the very first output buffer
        // SPS and PPS are separated by start code 0x00000001
        // Assume that we have exactly one SPS and exactly one PPS.
        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_WRONG_STATE) {
                mSpsPpsHeaderReceived = true;
                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
                return;
            } else {
                switch (type) {
                    case AVC_NALTYPE_SPS:
                        ++mNumInputFrames;
                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen = 4 + dataLength;
                        outPtr += (dataLength + 4);  // 4 bytes for next start code
                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
                        break;
                    default:
                        CHECK_EQ(AVC_NALTYPE_PPS, type);
                        ++mNumInputFrames;
                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
                                "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen += (dataLength + 4);
                        outPtr += (dataLength + 4);
                        break;
                }
            }
        }

        // Get next input video frame
        if (mReadyForNextFrame) {
            // Save the input buffer info so that it can be
            // passed to an output buffer
            InputBufferInfo info;
            info.mTimeUs = inHeader->nTimeStamp;
            info.mFlags = inHeader->nFlags;
            mInputBufferInfoVec.push(info);
            mPrevTimestampUs = inHeader->nTimeStamp;

            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mSawInputEOS = true;
            }

            if (inHeader->nFilledLen > 0) {
                AVCFrameIO videoInput;
                memset(&videoInput, 0, sizeof(videoInput));
                videoInput.height = ((mVideoHeight + 15) >> 4) << 4;
                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                const void *inData = inHeader->pBuffer + inHeader->nOffset;
                uint8_t *inputData = (uint8_t *) inData;

                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
                    ConvertYUV420SemiPlanarToYUV420Planar(
                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
                    inputData = mInputFrameData;
                }
                CHECK(inputData != NULL);
                videoInput.YCbCr[0] = inputData;
                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                    ((videoInput.height * videoInput.pitch) >> 2);
                videoInput.disp_order = mNumInputFrames;

                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
                    mReadyForNextFrame = false;
                    ++mNumInputFrames;
                    if (encoderStatus == AVCENC_NEW_IDR) {
                        mIsIDRFrame = true;
                    }
                } else {
                    if (encoderStatus < AVCENC_SUCCESS) {
                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    } else {
                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        inQueue.erase(inQueue.begin());
                        inInfo->mOwnedByUs = false;
                        notifyEmptyBufferDone(inHeader);
                        return;
                    }
                }
            }
        }

        // Encode an input video frame
        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
        if (inHeader->nFilledLen > 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_SUCCESS) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
            } else if (encoderStatus == AVCENC_PICTURE_READY) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
                if (mIsIDRFrame) {
                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                    mIsIDRFrame = false;
                }
                mReadyForNextFrame = true;
                AVCFrameIO recon;
                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
                    PVAVCEncReleaseRecon(mHandle, &recon);
                }
            } else {
                dataLength = 0;
                mReadyForNextFrame = true;
            }

            if (encoderStatus < AVCENC_SUCCESS) {
                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                mSignalledError = true;
                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                return;
            }
        } else {
            dataLength = 0;
        }

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
        CHECK(!mInputBufferInfoVec.empty());
        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
        if (mSawInputEOS) {
            outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
        }
        outHeader->nFilledLen = dataLength;
        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);
    }
}

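// DPB allocation callback from the PV encoder: each reference frame needs
// sizeInMbs * 384 bytes (a 16x16 macroblock carries 256 luma bytes plus
// 64 Cb and 64 Cr bytes), which is what (sizeInMbs << 7) * 3 works out to.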
int32_t SoftAVCEncoder::allocOutputBuffers(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mOutputBuffers.isEmpty());
    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i < numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        buffer->setObserver(this);
        mOutputBuffers.push(buffer);
    }

    return 1;
}

void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}

int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t) mOutputBuffers.size());
    *yuv = (uint8_t *) mOutputBuffers[index]->data();

    return 1;
}

void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer);
}

}  // namespace android

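// Exported factory hook; the stagefright soft-OMX plugin is expected to
// resolve this symbol when it loads the component library and call it to
// instantiate the encoder.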
android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVCEncoder(name, callbacks, appData, component);
}