// SoftAVCEncoder.cpp, revision 423766ca07beb7e3e9cd301385708ca13fcce3e1
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>

#include "avcenc_api.h"
#include "avcenc_int.h"
#include "OMX_Video.h"

#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
#include <ui/Rect.h>
#include <ui/GraphicBufferMapper.h>

#include "SoftAVCEncoder.h"

#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
#define UNUSED_UNLESS_VERBOSE(x)
#endif

namespace android {

template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

typedef struct LevelConversion {
    OMX_U32 omxLevel;
    AVCLevel avcLevel;
} LevelConversion;

static LevelConversion ConversionTable[] = {
    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B },
    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1   },
    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
#if 0
    // encoding speed is very poor if video
    // resolution is higher than CIF
    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
#endif
};

static status_t ConvertOmxAvcLevelToAvcSpecLevel(
        OMX_U32 omxLevel, AVCLevel *avcLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (omxLevel == ConversionTable[i].omxLevel) {
            *avcLevel = ConversionTable[i].avcLevel;
            return OK;
        }
    }

    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
            (int32_t)omxLevel);

    return BAD_VALUE;
}

static status_t ConvertAvcSpecLevelToOmxAvcLevel(
    AVCLevel avcLevel, OMX_U32 *omxLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (avcLevel == ConversionTable[i].avcLevel) {
            *omxLevel = ConversionTable[i].omxLevel;
            return OK;
        }
    }

    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
            (int32_t) avcLevel);

    return BAD_VALUE;
}

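// Memory and DPB-management callbacks handed to the PV AVC encoder core
// through tagAVCHandle in initEncParams(); the encoder calls back into this
// component to allocate, bind and release its reference-frame buffers.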
static void* MallocWrapper(
        void * /* userData */, int32_t size, int32_t /* attrs */) {
    void *ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);
    return ptr;
}

static void FreeWrapper(void * /* userData */, void* ptr) {
    free(ptr);
}

static int32_t DpbAllocWrapper(void *userData,
        unsigned int sizeInMbs, unsigned int numBuffers) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
}

static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->bindOutputBuffer(index, yuv);
}

static void UnbindFrameWrapper(void *userData, int32_t index) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->unbindOutputBuffer(index);
}

SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
      mVideoWidth(176),
      mVideoHeight(144),
      mVideoFrameRate(30),
      mVideoBitRate(192000),
      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
      mStoreMetaDataInBuffers(false),
      mIDRFrameRefreshIntervalInSec(1),
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),
      mEncParams(new tagAVCEncParam),
      mInputFrameData(NULL),
      mSliceGroup(NULL) {

    initPorts();
    ALOGI("Construct SoftAVCEncoder");
}

SoftAVCEncoder::~SoftAVCEncoder() {
    ALOGV("Destruct SoftAVCEncoder");
    releaseEncoder();
    List<BufferInfo *> &outQueue = getPortQueue(1);
    List<BufferInfo *> &inQueue = getPortQueue(0);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());
}

OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(*mEncParams));
    mEncParams->rate_control = AVC_ON;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
            || mStoreMetaDataInBuffers) {
        // Color conversion is needed.
        free(mInputFrameData);
        if (((uint64_t)mVideoWidth * mVideoHeight) > ((uint64_t)INT32_MAX / 3)) {
            ALOGE("Buffer size is too big.");
            return OMX_ErrorUndefined;
        }
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's AVC encoder requires the video dimensions to be a multiple of 16.
    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return OMX_ErrorBadParameter;
    }

    mEncParams->width = mVideoWidth;
    mEncParams->height = mVideoHeight;
    mEncParams->bitrate = mVideoBitRate;
    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // in frames per 1000 seconds
    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);

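    // A macroblock covers 16x16 pixels, so round each dimension up to a
    // multiple of 16 and divide the product by 256 to count macroblocks.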
    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
    CHECK(mSliceGroup == NULL);
    if ((size_t)nMacroBlocks > SIZE_MAX / sizeof(uint32_t)) {
        ALOGE("requested memory size is too big.");
        return OMX_ErrorUndefined;
    }
    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    CHECK(mSliceGroup != NULL);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        mSliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = mSliceGroup;

    // Set IDR frame refresh interval
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->idr_period = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->idr_period = 1;  // All I frames
    } else {
        mEncParams->idr_period =
            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
    }

    // Set profile and level
    mEncParams->profile = mAVCEncProfile;
    mEncParams->level = mAVCEncLevel;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
    CHECK(!mStarted);

    OMX_ERRORTYPE errType = OMX_ErrorNone;
    if (OMX_ErrorNone != (errType = initEncParams())) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return errType;
    }

    AVCEnc_Status err;
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
    if (err != AVCENC_SUCCESS) {
        ALOGE("Failed to initialize the encoder: %d", err);
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return OMX_ErrorUndefined;
    }

    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = false;
    mStarted = true;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    if (!mStarted) {
        return OMX_ErrorNone;
    }

    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    free(mInputFrameData);
    mInputFrameData = NULL;

    free(mSliceGroup);
    mSliceGroup = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    return OMX_ErrorNone;
}

void SoftAVCEncoder::releaseOutputBuffers() {
    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
        buffer->setObserver(NULL);
        buffer->release();
    }
    mOutputBuffers.clear();
}

void SoftAVCEncoder::initPorts() {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;

    // 31584 is PV's magic number.  Not sure why.
    const size_t kOutputBufferSize =
            (kInputBufferSize > 31584) ? kInputBufferSize: 31584;

    def.nPortIndex = 0;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kInputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);

    def.nPortIndex = 1;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kOutputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>("video/avc");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.xFramerate = (0 << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);
}

OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (!isValidOMXParam(bitRate)) {
                return OMX_ErrorBadParameter;
            }

            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mVideoBitRate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (!isValidOMXParam(formatParams)) {
                return OMX_ErrorBadParameter;
            }

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 2) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                if (formatParams->nIndex == 0) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                } else if (formatParams->nIndex == 1) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
                } else {
                    formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
                }
            } else {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (!isValidOMXParam(avcParams)) {
                return OMX_ErrorBadParameter;
            }

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;

            if (!isValidOMXParam(profileLevel)) {
                return OMX_ErrorBadParameter;
            }

            if (profileLevel->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            const size_t size =
                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);

            if (profileLevel->nProfileIndex >= size) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (!isValidOMXParam(bitRate)) {
                return OMX_ErrorBadParameter;
            }

            if (bitRate->nPortIndex != 1 ||
                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
                return OMX_ErrorUndefined;
            }

            mVideoBitRate = bitRate->nTargetBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_PARAM_PORTDEFINITIONTYPE *def =
                (OMX_PARAM_PORTDEFINITIONTYPE *)params;

            if (!isValidOMXParam(def)) {
                return OMX_ErrorBadParameter;
            }

            if (def->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (def->nPortIndex == 0) {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
                     def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
                    return OMX_ErrorUndefined;
                }
            } else {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
                    return OMX_ErrorUndefined;
                }
            }

            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
            if (OMX_ErrorNone != err) {
                return err;
            }

            if (def->nPortIndex == 0) {
                mVideoWidth = def->format.video.nFrameWidth;
                mVideoHeight = def->format.video.nFrameHeight;
                mVideoFrameRate = def->format.video.xFramerate >> 16;
                mVideoColorFormat = def->format.video.eColorFormat;

                OMX_PARAM_PORTDEFINITIONTYPE *portDef =
                    &editPortInfo(0)->mDef;
                portDef->format.video.nFrameWidth = mVideoWidth;
                portDef->format.video.nFrameHeight = mVideoHeight;
                portDef->format.video.xFramerate = def->format.video.xFramerate;
                portDef->format.video.eColorFormat =
                    (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
                portDef = &editPortInfo(1)->mDef;
                portDef->format.video.nFrameWidth = mVideoWidth;
                portDef->format.video.nFrameHeight = mVideoHeight;
            } else {
                mVideoBitRate = def->format.video.nBitrate;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (!isValidOMXParam(roleParams)) {
                return OMX_ErrorBadParameter;
            }

            if (strncmp((const char *)roleParams->cRole,
                        "video_encoder.avc",
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (!isValidOMXParam(formatParams)) {
                return OMX_ErrorBadParameter;
            }

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 2) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    ((formatParams->nIndex == 0 &&
                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
                    (formatParams->nIndex == 1 &&
                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
                    (formatParams->nIndex == 2 &&
                     formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
                    return OMX_ErrorUndefined;
                }
                mVideoColorFormat = formatParams->eColorFormat;
            } else {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
                    return OMX_ErrorUndefined;
                }
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcType =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (!isValidOMXParam(avcType)) {
                return OMX_ErrorBadParameter;
            }

            if (avcType->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // PV's AVC encoder only supports baseline profile
            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
                avcType->nRefFrames != 1 ||
                avcType->nBFrames != 0 ||
                avcType->bUseHadamard != OMX_TRUE ||
                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
                avcType->nRefIdx10ActiveMinus1 != 0 ||
                avcType->nRefIdx11ActiveMinus1 != 0 ||
                avcType->bWeightedPPrediction != OMX_FALSE ||
                avcType->bEntropyCodingCABAC != OMX_FALSE ||
                avcType->bconstIpred != OMX_FALSE ||
                avcType->bDirect8x8Inference != OMX_FALSE ||
                avcType->bDirectSpatialTemporal != OMX_FALSE ||
                avcType->nCabacInitIdc != 0) {
                return OMX_ErrorUndefined;
            }

            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case kStoreMetaDataExtensionIndex:
        {
            StoreMetaDataInBuffersParams *storeParams =
                    (StoreMetaDataInBuffersParams*)params;

            if (!isValidOMXParam(storeParams)) {
                return OMX_ErrorBadParameter;
            }

            if (storeParams->nPortIndex != 0) {
                ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
                        __FUNCTION__);
                return OMX_ErrorUndefined;
            }

            mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
            ALOGV("StoreMetaDataInBuffers set to: %s",
                    mStoreMetaDataInBuffers ? "true" : "false");

            if (mStoreMetaDataInBuffers) {
                mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
            }

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}

void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mSawInputEOS) {
        return;
    }

    if (!mStarted) {
        if (OMX_ErrorNone != initEncoder()) {
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFlags = 0;
        outHeader->nOffset = 0;
        outHeader->nFilledLen = 0;

        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
        uint32_t dataLength = outHeader->nAllocLen;

        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
            // 4 bytes are reserved for holding the start code 0x00000001
            // of the sequence parameter set at the beginning.
            outPtr += 4;
            dataLength -= 4;
        }

        int32_t type;
        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;

        // Combine SPS and PPS and place them in the very first output buffer
        // SPS and PPS are separated by start code 0x00000001
        // Assume that we have exactly one SPS and exactly one PPS.
        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
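            // AVCENC_WRONG_STATE is treated here as "no more parameter-set
            // NALs": both SPS and PPS have already been written out.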
            if (encoderStatus == AVCENC_WRONG_STATE) {
                mSpsPpsHeaderReceived = true;
                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
                return;
            } else {
                switch (type) {
                    case AVC_NALTYPE_SPS:
                        ++mNumInputFrames;
                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen = 4 + dataLength;
                        outPtr += (dataLength + 4);  // 4 bytes for next start code
                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
                        break;
                    default:
                        CHECK_EQ(AVC_NALTYPE_PPS, type);
                        ++mNumInputFrames;
                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
                                "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen += (dataLength + 4);
                        outPtr += (dataLength + 4);
                        break;
                }
            }
        }

        // Get next input video frame
        if (mReadyForNextFrame) {
            // Save the input buffer info so that it can be
            // passed to an output buffer
            InputBufferInfo info;
            info.mTimeUs = inHeader->nTimeStamp;
            info.mFlags = inHeader->nFlags;
            mInputBufferInfoVec.push(info);
            mPrevTimestampUs = inHeader->nTimeStamp;

            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mSawInputEOS = true;
            }

            if (inHeader->nFilledLen > 0) {
                AVCFrameIO videoInput;
                memset(&videoInput, 0, sizeof(videoInput));
                videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                const uint8_t *inputData = NULL;
                if (mStoreMetaDataInBuffers) {
                    if (inHeader->nFilledLen != 8) {
                        ALOGE("MetaData buffer is wrong size! "
                                "(got %u bytes, expected 8)", inHeader->nFilledLen);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    }
                    inputData =
                        extractGraphicBuffer(
                                mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
                                inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
                                mVideoWidth, mVideoHeight);
                    if (inputData == NULL) {
                        ALOGE("Unable to extract gralloc buffer in metadata mode");
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    }
                    // TODO: Verify/convert pixel format enum
                } else {
                    inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
                    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
                        ConvertYUV420SemiPlanarToYUV420Planar(
                            inputData, mInputFrameData, mVideoWidth, mVideoHeight);
                        inputData = mInputFrameData;
                    }
                }

                CHECK(inputData != NULL);
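                // Planar YUV420 layout: the Cb plane starts right after the
                // pitch x height luma plane, Cr after the quarter-size Cb plane.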
                videoInput.YCbCr[0] = (uint8_t *)inputData;
                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                    ((videoInput.height * videoInput.pitch) >> 2);
                videoInput.disp_order = mNumInputFrames;

                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
                    mReadyForNextFrame = false;
                    ++mNumInputFrames;
                    if (encoderStatus == AVCENC_NEW_IDR) {
                        mIsIDRFrame = true;
                    }
                } else {
                    if (encoderStatus < AVCENC_SUCCESS) {
                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    } else {
                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        inQueue.erase(inQueue.begin());
                        inInfo->mOwnedByUs = false;
                        notifyEmptyBufferDone(inHeader);
                        return;
                    }
                }
            }
        }

        // Encode an input video frame
        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
        if (inHeader->nFilledLen > 0) {
            if (outHeader->nAllocLen >= 4) {
                memcpy(outPtr, "\x00\x00\x00\x01", 4);
                outPtr += 4;
                dataLength -= 4;
            }
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
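            // Convert the NAL payload length returned by the encoder into the
            // total number of bytes filled from the start of the buffer,
            // including any start code prefix written above.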
            dataLength = outPtr + dataLength - outHeader->pBuffer;
            if (encoderStatus == AVCENC_SUCCESS) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
            } else if (encoderStatus == AVCENC_PICTURE_READY) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
                if (mIsIDRFrame) {
                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                    mIsIDRFrame = false;
                }
                mReadyForNextFrame = true;
                AVCFrameIO recon;
                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
                    PVAVCEncReleaseRecon(mHandle, &recon);
                }
            } else {
                dataLength = 0;
                mReadyForNextFrame = true;
            }

            if (encoderStatus < AVCENC_SUCCESS) {
                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                mSignalledError = true;
                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                return;
            }
        } else {
            dataLength = 0;
        }

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
        CHECK(!mInputBufferInfoVec.empty());
        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
        if (mSawInputEOS) {
            outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
        }
        outHeader->nFilledLen = dataLength;
        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);
        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
    }
}

int32_t SoftAVCEncoder::allocOutputBuffers(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mOutputBuffers.isEmpty());
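    // Each 16x16 macroblock holds 256 luma samples; at 1.5 bytes per pixel
    // for YUV420 that is (1 << 7) * 3 = 384 bytes per macroblock.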
    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i <  numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        buffer->setObserver(this);
        mOutputBuffers.push(buffer);
    }

    return 1;
}

void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}

int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t) mOutputBuffers.size());
    *yuv = (uint8_t *) mOutputBuffers[index]->data();

    return 1;
}

void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    UNUSED_UNLESS_VERBOSE(buffer);
    ALOGV("signalBufferReturned: %p", buffer);
}

}  // namespace android

android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVCEncoder(name, callbacks, appData, component);
}