// SoftAVCEncoder.cpp revision a0c5812dfbb34c168129baa300942db18bed9f5e
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "SoftAVCEncoder"
19#include <utils/Log.h>
20
21#include "avcenc_api.h"
22#include "avcenc_int.h"
23#include "OMX_Video.h"
24
25#include <HardwareAPI.h>
26#include <MetadataBufferType.h>
27#include <media/stagefright/foundation/ADebug.h>
28#include <media/stagefright/MediaDefs.h>
29#include <media/stagefright/MediaErrors.h>
30#include <media/stagefright/MetaData.h>
31#include <media/stagefright/Utils.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34
35#include "SoftAVCEncoder.h"
36
37namespace android {
38
// Stamp the bookkeeping fields every OMX parameter structure must carry:
// the structure's own size and the OpenMAX IL spec version (1.0.0.0).
template<typename T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    // Spec version 1.0.0.0.
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}
47
48typedef struct LevelConversion {
49    OMX_U32 omxLevel;
50    AVCLevel avcLevel;
51} LevelConcersion;
52
53static LevelConversion ConversionTable[] = {
54    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B },
55    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1   },
56    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
57    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
58    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
59    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
60#if 0
61    // encoding speed is very poor if video
62    // resolution is higher than CIF
63    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
64    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
65    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
66    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
67    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
68    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
69    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
70    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
71    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
72    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
73#endif
74};
75
76static status_t ConvertOmxAvcLevelToAvcSpecLevel(
77        OMX_U32 omxLevel, AVCLevel *avcLevel) {
78    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
79        i < n; ++i) {
80        if (omxLevel == ConversionTable[i].omxLevel) {
81            *avcLevel = ConversionTable[i].avcLevel;
82            return OK;
83        }
84    }
85
86    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
87            (int32_t)omxLevel);
88
89    return BAD_VALUE;
90}
91
92static status_t ConvertAvcSpecLevelToOmxAvcLevel(
93    AVCLevel avcLevel, OMX_U32 *omxLevel) {
94    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
95        i < n; ++i) {
96        if (avcLevel == ConversionTable[i].avcLevel) {
97            *omxLevel = ConversionTable[i].omxLevel;
98            return OK;
99        }
100    }
101
102    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
103            (int32_t) avcLevel);
104
105    return BAD_VALUE;
106}
107
// Convert a semi-planar YUV420 frame (full Y plane followed by
// interleaved chroma byte pairs) into a planar YUV420 frame
// (Y plane, then Cb plane, then Cr plane).
//
// Per the original word-based code (see its "Flip U and V" step), the
// FIRST byte of each input chroma pair lands in the Cr plane and the
// SECOND byte in the Cb plane.
//
// The original implementation read the chroma data through uint32_t*
// and wrote it through uint16_t* pointers, which requires 4-byte
// alignment of both buffers (undefined behavior / bus faults on
// strict-alignment targets) and silently dropped trailing chroma when
// width was not a multiple of 4. This byte-oriented version produces
// byte-identical output for the aligned, multiple-of-4-width case
// (the only one the encoder uses: dimensions are multiples of 16) and
// is additionally correct for any even width and any alignment.
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        const uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint8_t *outcb = outyuv + outYsize;            // Cb plane, Y/4 bytes
    uint8_t *outcr = outcb + (outYsize >> 2);      // Cr plane, Y/4 bytes

    /* Y copying */
    memcpy(outyuv, inyuv, outYsize);

    /* Chroma de-interleaving: one (Cr, Cb) pair per 2x2 luma block. */
    const uint8_t *inchroma = inyuv + outYsize;
    int32_t numPairs = (width >> 1) * (height >> 1);
    for (int32_t i = 0; i < numPairs; ++i) {
        *outcr++ = *inchroma++;  // first byte of the pair
        *outcb++ = *inchroma++;  // second byte of the pair
    }
}
137
// PV encoder allocation callback: returns |size| bytes of zero-filled
// memory, or NULL on failure. |userData| and |attrs| are unused but
// required by the CBAVC_Malloc callback signature.
static void* MallocWrapper(
        void *userData, int32_t size, int32_t attrs) {
    // calloc yields zero-initialized memory directly, replacing the
    // original malloc + memset pair.
    return calloc(1, size);
}
145
// PV encoder deallocation callback for memory obtained via MallocWrapper.
// |userData| is unused but required by the CBAVC_Free callback signature;
// free(NULL) is a harmless no-op.
static void FreeWrapper(void *userData, void* ptr) {
    free(ptr);
}
149
150static int32_t DpbAllocWrapper(void *userData,
151        unsigned int sizeInMbs, unsigned int numBuffers) {
152    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
153    CHECK(encoder != NULL);
154    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
155}
156
157static int32_t BindFrameWrapper(
158        void *userData, int32_t index, uint8_t **yuv) {
159    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
160    CHECK(encoder != NULL);
161    return encoder->bindOutputBuffer(index, yuv);
162}
163
164static void UnbindFrameWrapper(void *userData, int32_t index) {
165    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
166    CHECK(encoder != NULL);
167    return encoder->unbindOutputBuffer(index);
168}
169
// Construct the software AVC (H.264) encoder component with its default
// configuration: QCIF (176x144) at 30 fps and 192 kbps, planar YUV420
// input, baseline profile at level 2, one IDR frame per second.
SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mVideoWidth(176),
      mVideoHeight(144),
      mVideoFrameRate(30),
      mVideoBitRate(192000),
      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
      mStoreMetaDataInBuffers(false),
      mIDRFrameRefreshIntervalInSec(1),
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),       // reset to -2 in initEncoder() (SPS/PPS come first)
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),     // freed in releaseEncoder()
      mEncParams(new tagAVCEncParam),
      mInputFrameData(NULL),     // allocated lazily, only when color conversion is needed
      mSliceGroup(NULL) {

    initPorts();
    ALOGI("Construct SoftAVCEncoder");
}
198
// Destructor: tear down the PV encoder and all buffers first, then verify
// that neither port still has queued work (both queues must be empty by
// the time the component is destroyed).
SoftAVCEncoder::~SoftAVCEncoder() {
    ALOGV("Destruct SoftAVCEncoder");
    releaseEncoder();
    List<BufferInfo *> &outQueue = getPortQueue(1);
    List<BufferInfo *> &inQueue = getPortQueue(0);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());
}
207
// Populate mHandle (callback table) and mEncParams (PV encoder settings)
// from the component's current configuration, and allocate the buffers
// the encoder needs (color-conversion scratch frame, slice-group map).
// Returns OMX_ErrorBadParameter if the video dimensions are not multiples
// of 16; OMX_ErrorNone otherwise.
OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagAVCHandle));
    // Wire up the C callbacks; userData carries |this| back to us.
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(*mEncParams));
    // Rate control on; initQP of 0 lets the encoder pick a starting QP.
    mEncParams->rate_control = AVC_ON;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;   // automatic scene-change detection
    mEncParams->out_of_band_param_set = AVC_ON;  // SPS/PPS delivered separately
    // POC type 2: picture order derived from frame numbers (no B frames).
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    // Single reference frame, single slice group, no FMO.
    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;  // in-loop deblocking enabled
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    // Keep the search cheap: no data partitioning, full search, sub-pel
    // motion, sub-MB prediction, RD optimization, or bidirectional
    // prediction (baseline, speed-oriented configuration).
    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        // Color conversion is needed.
        CHECK(mInputFrameData == NULL);
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's AVC encoder requires both video dimensions to be multiples of 16.
    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return OMX_ErrorBadParameter;
    }

    mEncParams->width = mVideoWidth;
    mEncParams->height = mVideoHeight;
    mEncParams->bitrate = mVideoBitRate;
    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // In frames/ms!
    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);

    // One slice-group entry per 16x16 macroblock; entries cycle through
    // the group indices (all 0 here since num_slice_group is 1).
    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
    CHECK(mSliceGroup == NULL);
    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    CHECK(mSliceGroup != NULL);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        mSliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = mSliceGroup;

    // Set IDR frame refresh interval: <0 means only the first frame is an
    // IDR; 0 means every frame; otherwise convert seconds to frames.
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->idr_period = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->idr_period = 1;  // All I frames
    } else {
        mEncParams->idr_period =
            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
    }

    // Set profile and level
    mEncParams->profile = mAVCEncProfile;
    mEncParams->level = mAVCEncLevel;

    return OMX_ErrorNone;
}
307
308OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
309    CHECK(!mStarted);
310
311    OMX_ERRORTYPE errType = OMX_ErrorNone;
312    if (OMX_ErrorNone != (errType = initEncParams())) {
313        ALOGE("Failed to initialized encoder params");
314        mSignalledError = true;
315        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
316        return errType;
317    }
318
319    AVCEnc_Status err;
320    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
321    if (err != AVCENC_SUCCESS) {
322        ALOGE("Failed to initialize the encoder: %d", err);
323        mSignalledError = true;
324        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
325        return OMX_ErrorUndefined;
326    }
327
328    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
329    mSpsPpsHeaderReceived = false;
330    mReadyForNextFrame = true;
331    mIsIDRFrame = false;
332    mStarted = true;
333
334    return OMX_ErrorNone;
335}
336
// Release the PV encoder and every buffer this component owns. Safe to
// call when the encoder was never started (returns immediately). Always
// returns OMX_ErrorNone.
OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    if (!mStarted) {
        return OMX_ErrorNone;
    }

    // Shut down the PV encoder before freeing the buffers it may reference.
    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    // Scratch frame used for color conversion (may be NULL).
    free(mInputFrameData);
    mInputFrameData = NULL;

    // Per-macroblock slice-group map allocated in initEncParams().
    free(mSliceGroup);
    mSliceGroup = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    return OMX_ErrorNone;
}
361
362void SoftAVCEncoder::releaseOutputBuffers() {
363    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
364        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
365        buffer->setObserver(NULL);
366        buffer->release();
367    }
368    mOutputBuffers.clear();
369}
370
// Register the component's two ports: port 0 takes raw YUV input frames,
// port 1 emits AVC (H.264) bitstream buffers. Buffer sizes are derived
// from the current (default) video dimensions.
void SoftAVCEncoder::initPorts() {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    // One full YUV420 frame: width * height * 1.5 bytes.
    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;

    // 31584 is PV's magic number.  Not sure why.
    const size_t kOutputBufferSize =
            (kInputBufferSize > 31584) ? kInputBufferSize: 31584;

    // Port 0: raw video input.
    def.nPortIndex = 0;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kInputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);

    // Port 1: compressed AVC output (reuses |def|, overriding as needed).
    def.nPortIndex = 1;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kOutputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>("video/avc");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.xFramerate = (0 << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);
}
427
// OMX parameter queries. Reports the encoder's fixed capabilities
// (baseline profile, VBR rate control, the supported input color formats
// and level range); anything unrecognized is delegated to the base class.
OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            // Error-resilience tools are not supported by this encoder.
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            // Bitrate lives on the output port (1) only.
            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mVideoBitRate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            // Three enumerable entries (indices 0..2) per port.
            if (formatParams->nIndex > 2) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                // Input port: raw video in one of three color formats.
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                if (formatParams->nIndex == 0) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                } else if (formatParams->nIndex == 1) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
                } else {
                    formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
                }
            } else {
                // Output port: compressed AVC, no color format.
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // Only baseline profile is supported; report the current level.
            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            // Fixed capabilities: one reference frame, no B frames, and
            // none of the main/high-profile tools (CABAC, weighted
            // prediction, 8x8 inference, ...).
            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;

            if (profileLevel->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // Enumerate ConversionTable: one baseline-profile entry per
            // supported level.
            const size_t size =
                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);

            if (profileLevel->nProfileIndex >= size) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}
539
540OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
541        OMX_INDEXTYPE index, const OMX_PTR params) {
542    int32_t indexFull = index;
543
544    switch (indexFull) {
545        case OMX_IndexParamVideoErrorCorrection:
546        {
547            return OMX_ErrorNotImplemented;
548        }
549
550        case OMX_IndexParamVideoBitrate:
551        {
552            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
553                (OMX_VIDEO_PARAM_BITRATETYPE *) params;
554
555            if (bitRate->nPortIndex != 1 ||
556                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
557                return OMX_ErrorUndefined;
558            }
559
560            mVideoBitRate = bitRate->nTargetBitrate;
561            return OMX_ErrorNone;
562        }
563
564        case OMX_IndexParamPortDefinition:
565        {
566            OMX_PARAM_PORTDEFINITIONTYPE *def =
567                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
568            if (def->nPortIndex > 1) {
569                return OMX_ErrorUndefined;
570            }
571
572            if (def->nPortIndex == 0) {
573                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
574                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
575                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
576                     def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
577                    return OMX_ErrorUndefined;
578                }
579            } else {
580                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
581                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
582                    return OMX_ErrorUndefined;
583                }
584            }
585
586            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
587            if (OMX_ErrorNone != err) {
588                return err;
589            }
590
591            if (def->nPortIndex == 0) {
592                mVideoWidth = def->format.video.nFrameWidth;
593                mVideoHeight = def->format.video.nFrameHeight;
594                mVideoFrameRate = def->format.video.xFramerate >> 16;
595                mVideoColorFormat = def->format.video.eColorFormat;
596
597                OMX_PARAM_PORTDEFINITIONTYPE *portDef =
598                    &editPortInfo(0)->mDef;
599                portDef->format.video.nFrameWidth = mVideoWidth;
600                portDef->format.video.nFrameHeight = mVideoHeight;
601                portDef->format.video.xFramerate = def->format.video.xFramerate;
602                portDef->format.video.eColorFormat =
603                    (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
604                portDef = &editPortInfo(1)->mDef;
605                portDef->format.video.nFrameWidth = mVideoWidth;
606                portDef->format.video.nFrameHeight = mVideoHeight;
607            } else {
608                mVideoBitRate = def->format.video.nBitrate;
609            }
610
611            return OMX_ErrorNone;
612        }
613
614        case OMX_IndexParamStandardComponentRole:
615        {
616            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
617                (const OMX_PARAM_COMPONENTROLETYPE *)params;
618
619            if (strncmp((const char *)roleParams->cRole,
620                        "video_encoder.avc",
621                        OMX_MAX_STRINGNAME_SIZE - 1)) {
622                return OMX_ErrorUndefined;
623            }
624
625            return OMX_ErrorNone;
626        }
627
628        case OMX_IndexParamVideoPortFormat:
629        {
630            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
631                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
632
633            if (formatParams->nPortIndex > 1) {
634                return OMX_ErrorUndefined;
635            }
636
637            if (formatParams->nIndex > 2) {
638                return OMX_ErrorNoMore;
639            }
640
641            if (formatParams->nPortIndex == 0) {
642                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
643                    ((formatParams->nIndex == 0 &&
644                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
645                    (formatParams->nIndex == 1 &&
646                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
647                    (formatParams->nIndex == 2 &&
648                     formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
649                    return OMX_ErrorUndefined;
650                }
651                mVideoColorFormat = formatParams->eColorFormat;
652            } else {
653                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
654                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
655                    return OMX_ErrorUndefined;
656                }
657            }
658
659            return OMX_ErrorNone;
660        }
661
662        case OMX_IndexParamVideoAvc:
663        {
664            OMX_VIDEO_PARAM_AVCTYPE *avcType =
665                (OMX_VIDEO_PARAM_AVCTYPE *)params;
666
667            if (avcType->nPortIndex != 1) {
668                return OMX_ErrorUndefined;
669            }
670
671            // PV's AVC encoder only supports baseline profile
672            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
673                avcType->nRefFrames != 1 ||
674                avcType->nBFrames != 0 ||
675                avcType->bUseHadamard != OMX_TRUE ||
676                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
677                avcType->nRefIdx10ActiveMinus1 != 0 ||
678                avcType->nRefIdx11ActiveMinus1 != 0 ||
679                avcType->bWeightedPPrediction != OMX_FALSE ||
680                avcType->bEntropyCodingCABAC != OMX_FALSE ||
681                avcType->bconstIpred != OMX_FALSE ||
682                avcType->bDirect8x8Inference != OMX_FALSE ||
683                avcType->bDirectSpatialTemporal != OMX_FALSE ||
684                avcType->nCabacInitIdc != 0) {
685                return OMX_ErrorUndefined;
686            }
687
688            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
689                return OMX_ErrorUndefined;
690            }
691
692            return OMX_ErrorNone;
693        }
694
695        case kStoreMetaDataExtensionIndex:
696        {
697            StoreMetaDataInBuffersParams *storeParams =
698                    (StoreMetaDataInBuffersParams*)params;
699            if (storeParams->nPortIndex != 0) {
700                ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
701                        __FUNCTION__);
702                return OMX_ErrorUndefined;
703            }
704
705            mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
706            ALOGV("StoreMetaDataInBuffers set to: %s",
707                    mStoreMetaDataInBuffers ? " true" : "false");
708
709            if (mStoreMetaDataInBuffers) {
710                mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
711                if (mInputFrameData == NULL) {
712                    mInputFrameData =
713                            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
714                }
715            }
716
717            return OMX_ErrorNone;
718        }
719
720        default:
721            return SimpleSoftOMXComponent::internalSetParameter(index, params);
722    }
723}
724
725void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
726    if (mSignalledError || mSawInputEOS) {
727        return;
728    }
729
730    if (!mStarted) {
731        if (OMX_ErrorNone != initEncoder()) {
732            return;
733        }
734    }
735
736    List<BufferInfo *> &inQueue = getPortQueue(0);
737    List<BufferInfo *> &outQueue = getPortQueue(1);
738
739    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
740        BufferInfo *inInfo = *inQueue.begin();
741        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
742        BufferInfo *outInfo = *outQueue.begin();
743        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
744
745        outHeader->nTimeStamp = 0;
746        outHeader->nFlags = 0;
747        outHeader->nOffset = 0;
748        outHeader->nFilledLen = 0;
749        outHeader->nOffset = 0;
750
751        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
752        uint32_t dataLength = outHeader->nAllocLen;
753
754        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
755            // 4 bytes are reserved for holding the start code 0x00000001
756            // of the sequence parameter set at the beginning.
757            outPtr += 4;
758            dataLength -= 4;
759        }
760
761        int32_t type;
762        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;
763
764        // Combine SPS and PPS and place them in the very first output buffer
765        // SPS and PPS are separated by start code 0x00000001
766        // Assume that we have exactly one SPS and exactly one PPS.
767        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
768            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
769            if (encoderStatus == AVCENC_WRONG_STATE) {
770                mSpsPpsHeaderReceived = true;
771                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
772                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
773                outQueue.erase(outQueue.begin());
774                outInfo->mOwnedByUs = false;
775                notifyFillBufferDone(outHeader);
776                return;
777            } else {
778                switch (type) {
779                    case AVC_NALTYPE_SPS:
780                        ++mNumInputFrames;
781                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
782                        outHeader->nFilledLen = 4 + dataLength;
783                        outPtr += (dataLength + 4);  // 4 bytes for next start code
784                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
785                        break;
786                    default:
787                        CHECK_EQ(AVC_NALTYPE_PPS, type);
788                        ++mNumInputFrames;
789                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
790                                "\x00\x00\x00\x01", 4);
791                        outHeader->nFilledLen += (dataLength + 4);
792                        outPtr += (dataLength + 4);
793                        break;
794                }
795            }
796        }
797
798        buffer_handle_t srcBuffer; // for MetaDataMode only
799
800        // Get next input video frame
801        if (mReadyForNextFrame) {
802            // Save the input buffer info so that it can be
803            // passed to an output buffer
804            InputBufferInfo info;
805            info.mTimeUs = inHeader->nTimeStamp;
806            info.mFlags = inHeader->nFlags;
807            mInputBufferInfoVec.push(info);
808            mPrevTimestampUs = inHeader->nTimeStamp;
809
810            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
811                mSawInputEOS = true;
812            }
813
814            if (inHeader->nFilledLen > 0) {
815                AVCFrameIO videoInput;
816                memset(&videoInput, 0, sizeof(videoInput));
817                videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
818                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
819                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
820                uint8_t *inputData = NULL;
821                if (mStoreMetaDataInBuffers) {
822                    if (inHeader->nFilledLen != 8) {
823                        ALOGE("MetaData buffer is wrong size! "
824                                "(got %u bytes, expected 8)", inHeader->nFilledLen);
825                        mSignalledError = true;
826                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
827                        return;
828                    }
829                    inputData =
830                            extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
831                                    &srcBuffer);
832                    if (inputData == NULL) {
833                        ALOGE("Unable to extract gralloc buffer in metadata mode");
834                        mSignalledError = true;
835                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
836                        return;
837                    }
838                    // TODO: Verify/convert pixel format enum
839                } else {
840                    inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
841                }
842
843                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
844                    ConvertYUV420SemiPlanarToYUV420Planar(
845                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
846                    inputData = mInputFrameData;
847                }
848                CHECK(inputData != NULL);
849                videoInput.YCbCr[0] = inputData;
850                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
851                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
852                    ((videoInput.height * videoInput.pitch) >> 2);
853                videoInput.disp_order = mNumInputFrames;
854
855                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
856                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
857                    mReadyForNextFrame = false;
858                    ++mNumInputFrames;
859                    if (encoderStatus == AVCENC_NEW_IDR) {
860                        mIsIDRFrame = 1;
861                    }
862                } else {
863                    if (encoderStatus < AVCENC_SUCCESS) {
864                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
865                        mSignalledError = true;
866                        releaseGrallocData(srcBuffer);
867                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
868                        return;
869                    } else {
870                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
871                        inQueue.erase(inQueue.begin());
872                        inInfo->mOwnedByUs = false;
873                        releaseGrallocData(srcBuffer);
874                        notifyEmptyBufferDone(inHeader);
875                        return;
876                    }
877                }
878            }
879        }
880
881        // Encode an input video frame
882        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
883        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
884        if (inHeader->nFilledLen > 0) {
885            if (outHeader->nAllocLen >= 4) {
886                memcpy(outPtr, "\x00\x00\x00\x01", 4);
887                outPtr += 4;
888                dataLength -= 4;
889            }
890            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
891            dataLength = outPtr + dataLength - outHeader->pBuffer;
892            if (encoderStatus == AVCENC_SUCCESS) {
893                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
894            } else if (encoderStatus == AVCENC_PICTURE_READY) {
895                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
896                if (mIsIDRFrame) {
897                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
898                    mIsIDRFrame = false;
899                }
900                mReadyForNextFrame = true;
901                AVCFrameIO recon;
902                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
903                    PVAVCEncReleaseRecon(mHandle, &recon);
904                }
905            } else {
906                dataLength = 0;
907                mReadyForNextFrame = true;
908            }
909
910            if (encoderStatus < AVCENC_SUCCESS) {
911                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
912                mSignalledError = true;
913                releaseGrallocData(srcBuffer);
914                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
915                return;
916            }
917        } else {
918            dataLength = 0;
919        }
920
921        inQueue.erase(inQueue.begin());
922        inInfo->mOwnedByUs = false;
923        releaseGrallocData(srcBuffer);
924        notifyEmptyBufferDone(inHeader);
925
926        outQueue.erase(outQueue.begin());
927        CHECK(!mInputBufferInfoVec.empty());
928        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
929        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
930        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
931        if (mSawInputEOS) {
932            outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
933        }
934        outHeader->nFilledLen = dataLength;
935        outInfo->mOwnedByUs = false;
936        notifyFillBufferDone(outHeader);
937        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
938    }
939}
940
941int32_t SoftAVCEncoder::allocOutputBuffers(
942        unsigned int sizeInMbs, unsigned int numBuffers) {
943    CHECK(mOutputBuffers.isEmpty());
944    size_t frameSize = (sizeInMbs << 7) * 3;
945    for (unsigned int i = 0; i <  numBuffers; ++i) {
946        MediaBuffer *buffer = new MediaBuffer(frameSize);
947        buffer->setObserver(this);
948        mOutputBuffers.push(buffer);
949    }
950
951    return 1;
952}
953
// Callback from the PV encoder core when it is done with the output
// buffer at `index`. This component keeps no per-index binding state
// (bindOutputBuffer hands out a raw pointer into mOutputBuffers), so
// the only work here is a sanity check on the index.
void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}
957
958int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
959    CHECK(index >= 0);
960    CHECK(index < (int32_t) mOutputBuffers.size());
961    *yuv = (uint8_t *) mOutputBuffers[index]->data();
962
963    return 1;
964}
965
// MediaBufferObserver hook: invoked when a MediaBuffer we own (one of
// mOutputBuffers) is released by its consumer. Nothing needs to be
// reclaimed here; the pool is torn down elsewhere, so just log.
void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer);
}
969
970OMX_ERRORTYPE SoftAVCEncoder::getExtensionIndex(
971        const char *name, OMX_INDEXTYPE *index) {
972    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
973        *(int32_t*)index = kStoreMetaDataExtensionIndex;
974        return OMX_ErrorNone;
975    }
976    return OMX_ErrorUndefined;
977}
978
979uint8_t *SoftAVCEncoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
980    OMX_U32 type = *(OMX_U32*)data;
981    status_t res;
982    if (type != kMetadataBufferTypeGrallocSource) {
983        ALOGE("Data passed in with metadata mode does not have type "
984                "kMetadataBufferTypeGrallocSource (%d), has type %d instead",
985                kMetadataBufferTypeGrallocSource, type);
986        return NULL;
987    }
988    buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
989
990    const Rect rect(mVideoWidth, mVideoHeight);
991    uint8_t *img;
992    res = GraphicBufferMapper::get().lock(imgBuffer,
993            GRALLOC_USAGE_HW_VIDEO_ENCODER,
994            rect, (void**)&img);
995    if (res != OK) {
996        ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
997                imgBuffer);
998        return NULL;
999    }
1000
1001    *buffer = imgBuffer;
1002    return img;
1003}
1004
1005void SoftAVCEncoder::releaseGrallocData(buffer_handle_t buffer) {
1006    if (mStoreMetaDataInBuffers) {
1007        GraphicBufferMapper::get().unlock(buffer);
1008    }
1009}
1010
1011}  // namespace android
1012
1013android::SoftOMXComponent *createSoftOMXComponent(
1014        const char *name, const OMX_CALLBACKTYPE *callbacks,
1015        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
1016    return new android::SoftAVCEncoder(name, callbacks, appData, component);
1017}
1018