SoftAVCEncoder.cpp revision bf5bea96f236adb5eef78c2f414ef82b3602a0f7
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "SoftAVCEncoder"
19#include <utils/Log.h>
20
21#include "avcenc_api.h"
22#include "avcenc_int.h"
23#include "OMX_Video.h"
24
25#include <HardwareAPI.h>
26#include <MetadataBufferType.h>
27#include <media/stagefright/foundation/ADebug.h>
28#include <media/stagefright/MediaDefs.h>
29#include <media/stagefright/MediaErrors.h>
30#include <media/stagefright/MetaData.h>
31#include <media/stagefright/Utils.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34
35#include "SoftAVCEncoder.h"
36
37namespace android {
38
// Stamp the common OMX header fields on a parameter struct: the struct's
// byte size plus the component's OMX version (1.0.0.0).
template<class PARAM>
static void InitOMXParams(PARAM *params) {
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
    params->nSize = sizeof(PARAM);
}
47
48typedef struct LevelConversion {
49    OMX_U32 omxLevel;
50    AVCLevel avcLevel;
51} LevelConcersion;
52
// Maps OMX AVC level constants to the PV encoder's AVCLevel values.
// NOTE(review): the first two rows pair OMX_VIDEO_AVCLevel1 with
// AVC_LEVEL1_B and OMX_VIDEO_AVCLevel1b with AVC_LEVEL1 — that looks
// swapped (level "1b" should presumably map to AVC_LEVEL1_B); confirm
// against the OMX IL spec and the PV avcapi level enum before relying
// on Level1/Level1b behavior.
static LevelConversion ConversionTable[] = {
    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B },
    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1   },
    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
#if 0
    // encoding speed is very poor if video
    // resolution is higher than CIF
    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
#endif
};
75
76static status_t ConvertOmxAvcLevelToAvcSpecLevel(
77        OMX_U32 omxLevel, AVCLevel *avcLevel) {
78    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
79        i < n; ++i) {
80        if (omxLevel == ConversionTable[i].omxLevel) {
81            *avcLevel = ConversionTable[i].avcLevel;
82            return OK;
83        }
84    }
85
86    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
87            (int32_t)omxLevel);
88
89    return BAD_VALUE;
90}
91
92static status_t ConvertAvcSpecLevelToOmxAvcLevel(
93    AVCLevel avcLevel, OMX_U32 *omxLevel) {
94    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
95        i < n; ++i) {
96        if (avcLevel == ConversionTable[i].avcLevel) {
97            *omxLevel = ConversionTable[i].omxLevel;
98            return OK;
99        }
100    }
101
102    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
103            (int32_t) avcLevel);
104
105    return BAD_VALUE;
106}
107
// Convert a YUV420 semi-planar frame (one Y plane followed by interleaved
// 2-byte chroma pairs) into a fully planar layout (Y plane, then Cb plane,
// then Cr plane) of the same dimensions.
//
// The first byte of each interleaved chroma pair is written to the Cr
// plane and the second to the Cb plane — the "U/V flip" of the original
// implementation (presumably because camera buffers arrive NV21-style;
// confirm against the producer).  As before, only (width >> 2) * 4 chroma
// bytes per row are processed, so width is expected to be a multiple of 4.
//
// BUGFIX/portability: the original used unaligned uint32_t loads and
// uint16_t stores and silently assumed a little-endian CPU; this byte-wise
// version produces the identical output on any alignment and endianness.
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;

    /* Y plane has the same layout in both formats: bulk copy. */
    memcpy(outyuv, inyuv, outYsize);

    uint8_t *outcb = outyuv + outYsize;
    uint8_t *outcr = outcb + (outYsize >> 2);
    const uint8_t *inchroma = inyuv + outYsize;

    /* Deinterleave chroma: every 4 input bytes (two pairs) contribute
       2 bytes to each output plane. */
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            // Flip U and V
            outcr[0] = inchroma[0];
            outcb[0] = inchroma[1];
            outcr[1] = inchroma[2];
            outcb[1] = inchroma[3];
            outcb += 2;
            outcr += 2;
            inchroma += 4;
        }
    }
}
137
// PV codec 'malloc' callback.  The PV encoder ABI passes allocations
// around as int32_t, so the heap pointer is smuggled through an integer.
// NOTE(review): reinterpret_cast of a pointer to int32_t truncates on a
// 64-bit build — this component appears to assume a 32-bit process;
// confirm before porting.  userData and attrs are unused here.
static int32_t MallocWrapper(
        void *userData, int32_t size, int32_t attrs) {
    return reinterpret_cast<int32_t>(malloc(size));
}
142
// PV codec 'free' callback: releases a block previously returned by
// MallocWrapper (the pointer travels as an int32_t per the PV ABI —
// same 32-bit assumption as MallocWrapper).  userData is unused.
static void FreeWrapper(void *userData, int32_t ptr) {
    free(reinterpret_cast<void *>(ptr));
}
146
147static int32_t DpbAllocWrapper(void *userData,
148        unsigned int sizeInMbs, unsigned int numBuffers) {
149    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
150    CHECK(encoder != NULL);
151    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
152}
153
154static int32_t BindFrameWrapper(
155        void *userData, int32_t index, uint8_t **yuv) {
156    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
157    CHECK(encoder != NULL);
158    return encoder->bindOutputBuffer(index, yuv);
159}
160
161static void UnbindFrameWrapper(void *userData, int32_t index) {
162    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
163    CHECK(encoder != NULL);
164    return encoder->unbindOutputBuffer(index);
165}
166
// Construct the software AVC encoder component with QCIF/30fps/192kbps
// defaults; real settings arrive later via internalSetParameter.
SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mVideoWidth(176),      // QCIF defaults until the ports are reconfigured
      mVideoHeight(144),
      mVideoFrameRate(30),
      mVideoBitRate(192000),
      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
      mStoreMetaDataInBuffers(false),
      mIDRFrameRefreshIntervalInSec(1),  // one IDR frame per second
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),      // released in releaseEncoder()
      mEncParams(new tagAVCEncParam), // released in releaseEncoder()
      mInputFrameData(NULL),
      mSliceGroup(NULL) {

    initPorts();
    ALOGI("Construct SoftAVCEncoder");
}
195
196SoftAVCEncoder::~SoftAVCEncoder() {
197    ALOGV("Destruct SoftAVCEncoder");
198    releaseEncoder();
199    List<BufferInfo *> &outQueue = getPortQueue(1);
200    List<BufferInfo *> &inQueue = getPortQueue(0);
201    CHECK(outQueue.empty());
202    CHECK(inQueue.empty());
203}
204
205OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
206    CHECK(mHandle != NULL);
207    memset(mHandle, 0, sizeof(tagAVCHandle));
208    mHandle->AVCObject = NULL;
209    mHandle->userData = this;
210    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
211    mHandle->CBAVC_FrameBind = BindFrameWrapper;
212    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
213    mHandle->CBAVC_Malloc = MallocWrapper;
214    mHandle->CBAVC_Free = FreeWrapper;
215
216    CHECK(mEncParams != NULL);
217    memset(mEncParams, 0, sizeof(mEncParams));
218    mEncParams->rate_control = AVC_ON;
219    mEncParams->initQP = 0;
220    mEncParams->init_CBP_removal_delay = 1600;
221
222    mEncParams->intramb_refresh = 0;
223    mEncParams->auto_scd = AVC_ON;
224    mEncParams->out_of_band_param_set = AVC_ON;
225    mEncParams->poc_type = 2;
226    mEncParams->log2_max_poc_lsb_minus_4 = 12;
227    mEncParams->delta_poc_zero_flag = 0;
228    mEncParams->offset_poc_non_ref = 0;
229    mEncParams->offset_top_bottom = 0;
230    mEncParams->num_ref_in_cycle = 0;
231    mEncParams->offset_poc_ref = NULL;
232
233    mEncParams->num_ref_frame = 1;
234    mEncParams->num_slice_group = 1;
235    mEncParams->fmo_type = 0;
236
237    mEncParams->db_filter = AVC_ON;
238    mEncParams->disable_db_idc = 0;
239
240    mEncParams->alpha_offset = 0;
241    mEncParams->beta_offset = 0;
242    mEncParams->constrained_intra_pred = AVC_OFF;
243
244    mEncParams->data_par = AVC_OFF;
245    mEncParams->fullsearch = AVC_OFF;
246    mEncParams->search_range = 16;
247    mEncParams->sub_pel = AVC_OFF;
248    mEncParams->submb_pred = AVC_OFF;
249    mEncParams->rdopt_mode = AVC_OFF;
250    mEncParams->bidir_pred = AVC_OFF;
251
252    mEncParams->use_overrun_buffer = AVC_OFF;
253
254    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
255        // Color conversion is needed.
256        CHECK(mInputFrameData == NULL);
257        mInputFrameData =
258            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
259        CHECK(mInputFrameData != NULL);
260    }
261
262    // PV's AVC encoder requires the video dimension of multiple
263    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
264        ALOGE("Video frame size %dx%d must be a multiple of 16",
265            mVideoWidth, mVideoHeight);
266        return OMX_ErrorBadParameter;
267    }
268
269    mEncParams->width = mVideoWidth;
270    mEncParams->height = mVideoHeight;
271    mEncParams->bitrate = mVideoBitRate;
272    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // In frames/ms!
273    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);
274
275    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
276            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
277    CHECK(mSliceGroup == NULL);
278    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
279    CHECK(mSliceGroup != NULL);
280    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
281        mSliceGroup[ii] = idx++;
282        if (idx >= mEncParams->num_slice_group) {
283            idx = 0;
284        }
285    }
286    mEncParams->slice_group = mSliceGroup;
287
288    // Set IDR frame refresh interval
289    if (mIDRFrameRefreshIntervalInSec < 0) {
290        mEncParams->idr_period = -1;
291    } else if (mIDRFrameRefreshIntervalInSec == 0) {
292        mEncParams->idr_period = 1;  // All I frames
293    } else {
294        mEncParams->idr_period =
295            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
296    }
297
298    // Set profile and level
299    mEncParams->profile = mAVCEncProfile;
300    mEncParams->level = mAVCEncLevel;
301
302    return OMX_ErrorNone;
303}
304
305OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
306    CHECK(!mStarted);
307
308    OMX_ERRORTYPE errType = OMX_ErrorNone;
309    if (OMX_ErrorNone != (errType = initEncParams())) {
310        ALOGE("Failed to initialized encoder params");
311        mSignalledError = true;
312        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
313        return errType;
314    }
315
316    AVCEnc_Status err;
317    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
318    if (err != AVCENC_SUCCESS) {
319        ALOGE("Failed to initialize the encoder: %d", err);
320        mSignalledError = true;
321        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
322        return OMX_ErrorUndefined;
323    }
324
325    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
326    mSpsPpsHeaderReceived = false;
327    mReadyForNextFrame = true;
328    mIsIDRFrame = false;
329    mStarted = true;
330
331    return OMX_ErrorNone;
332}
333
334OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
335    if (!mStarted) {
336        return OMX_ErrorNone;
337    }
338
339    PVAVCCleanUpEncoder(mHandle);
340    releaseOutputBuffers();
341
342    delete mInputFrameData;
343    mInputFrameData = NULL;
344
345    delete mSliceGroup;
346    mSliceGroup = NULL;
347
348    delete mEncParams;
349    mEncParams = NULL;
350
351    delete mHandle;
352    mHandle = NULL;
353
354    mStarted = false;
355
356    return OMX_ErrorNone;
357}
358
359void SoftAVCEncoder::releaseOutputBuffers() {
360    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
361        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
362        buffer->setObserver(NULL);
363        buffer->release();
364    }
365    mOutputBuffers.clear();
366}
367
// Register the two OMX ports: port 0 takes raw YUV input frames, port 1
// emits the AVC bitstream.  Both use the current video geometry/bitrate.
void SoftAVCEncoder::initPorts() {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    // One full YUV420 frame: width * height * 3/2 bytes.
    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;

    // 31584 is PV's magic number.  Not sure why.
    const size_t kOutputBufferSize =
            (kInputBufferSize > 31584) ? kInputBufferSize: 31584;

    // Port 0: raw video input.
    def.nPortIndex = 0;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kInputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);

    // Port 1: compressed AVC output.  'def' is reused; only the fields
    // below differ from the input port's configuration.
    def.nPortIndex = 1;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kOutputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>("video/avc");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.xFramerate = (0 << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);
}
424
// OMX parameter queries.  Reports the encoder's fixed capabilities
// (baseline profile, single reference frame, VBR) and the current
// bitrate/level; everything else falls through to the base component.
OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            // Error resilience tools are not supported by this encoder.
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            // Bitrate is a property of the output (compressed) port only.
            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mVideoBitRate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            // Only three input formats are enumerable (indices 0-2).
            if (formatParams->nIndex > 2) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                // Input port: raw video in one of three color formats.
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                if (formatParams->nIndex == 0) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                } else if (formatParams->nIndex == 1) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
                } else {
                    formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
                }
            } else {
                // Output port: AVC bitstream, no color format.
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // The PV encoder supports baseline profile only; report the
            // currently configured level translated back to OMX terms.
            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;

            if (profileLevel->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // Enumerate supported levels straight from ConversionTable.
            const size_t size =
                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);

            if (profileLevel->nProfileIndex >= size) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}
536
537OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
538        OMX_INDEXTYPE index, const OMX_PTR params) {
539    int32_t indexFull = index;
540
541    switch (indexFull) {
542        case OMX_IndexParamVideoErrorCorrection:
543        {
544            return OMX_ErrorNotImplemented;
545        }
546
547        case OMX_IndexParamVideoBitrate:
548        {
549            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
550                (OMX_VIDEO_PARAM_BITRATETYPE *) params;
551
552            if (bitRate->nPortIndex != 1 ||
553                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
554                return OMX_ErrorUndefined;
555            }
556
557            mVideoBitRate = bitRate->nTargetBitrate;
558            return OMX_ErrorNone;
559        }
560
561        case OMX_IndexParamPortDefinition:
562        {
563            OMX_PARAM_PORTDEFINITIONTYPE *def =
564                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
565            if (def->nPortIndex > 1) {
566                return OMX_ErrorUndefined;
567            }
568
569            if (def->nPortIndex == 0) {
570                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
571                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
572                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
573                     def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
574                    return OMX_ErrorUndefined;
575                }
576            } else {
577                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
578                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
579                    return OMX_ErrorUndefined;
580                }
581            }
582
583            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
584            if (OMX_ErrorNone != err) {
585                return err;
586            }
587
588            if (def->nPortIndex == 0) {
589                mVideoWidth = def->format.video.nFrameWidth;
590                mVideoHeight = def->format.video.nFrameHeight;
591                mVideoFrameRate = def->format.video.xFramerate >> 16;
592                mVideoColorFormat = def->format.video.eColorFormat;
593            } else {
594                mVideoBitRate = def->format.video.nBitrate;
595            }
596
597            return OMX_ErrorNone;
598        }
599
600        case OMX_IndexParamStandardComponentRole:
601        {
602            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
603                (const OMX_PARAM_COMPONENTROLETYPE *)params;
604
605            if (strncmp((const char *)roleParams->cRole,
606                        "video_encoder.avc",
607                        OMX_MAX_STRINGNAME_SIZE - 1)) {
608                return OMX_ErrorUndefined;
609            }
610
611            return OMX_ErrorNone;
612        }
613
614        case OMX_IndexParamVideoPortFormat:
615        {
616            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
617                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
618
619            if (formatParams->nPortIndex > 1) {
620                return OMX_ErrorUndefined;
621            }
622
623            if (formatParams->nIndex > 2) {
624                return OMX_ErrorNoMore;
625            }
626
627            if (formatParams->nPortIndex == 0) {
628                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
629                    ((formatParams->nIndex == 0 &&
630                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
631                    (formatParams->nIndex == 1 &&
632                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
633                    (formatParams->nIndex == 2 &&
634                     formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
635                    return OMX_ErrorUndefined;
636                }
637                mVideoColorFormat = formatParams->eColorFormat;
638            } else {
639                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
640                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
641                    return OMX_ErrorUndefined;
642                }
643            }
644
645            return OMX_ErrorNone;
646        }
647
648        case OMX_IndexParamVideoAvc:
649        {
650            OMX_VIDEO_PARAM_AVCTYPE *avcType =
651                (OMX_VIDEO_PARAM_AVCTYPE *)params;
652
653            if (avcType->nPortIndex != 1) {
654                return OMX_ErrorUndefined;
655            }
656
657            // PV's AVC encoder only supports baseline profile
658            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
659                avcType->nRefFrames != 1 ||
660                avcType->nBFrames != 0 ||
661                avcType->bUseHadamard != OMX_TRUE ||
662                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
663                avcType->nRefIdx10ActiveMinus1 != 0 ||
664                avcType->nRefIdx11ActiveMinus1 != 0 ||
665                avcType->bWeightedPPrediction != OMX_FALSE ||
666                avcType->bEntropyCodingCABAC != OMX_FALSE ||
667                avcType->bconstIpred != OMX_FALSE ||
668                avcType->bDirect8x8Inference != OMX_FALSE ||
669                avcType->bDirectSpatialTemporal != OMX_FALSE ||
670                avcType->nCabacInitIdc != 0) {
671                return OMX_ErrorUndefined;
672            }
673
674            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
675                return OMX_ErrorUndefined;
676            }
677
678            return OMX_ErrorNone;
679        }
680
681        case kStoreMetaDataExtensionIndex:
682        {
683            StoreMetaDataInBuffersParams *storeParams =
684                    (StoreMetaDataInBuffersParams*)params;
685            if (storeParams->nPortIndex != 0) {
686                ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
687                        __FUNCTION__);
688                return OMX_ErrorUndefined;
689            }
690
691            mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
692            ALOGV("StoreMetaDataInBuffers set to: %s",
693                    mStoreMetaDataInBuffers ? " true" : "false");
694
695            if (mStoreMetaDataInBuffers) {
696                mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
697                if (mInputFrameData == NULL) {
698                    mInputFrameData =
699                            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
700                }
701            }
702
703            return OMX_ErrorNone;
704        }
705
706        default:
707            return SimpleSoftOMXComponent::internalSetParameter(index, params);
708    }
709}
710
711void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
712    if (mSignalledError || mSawInputEOS) {
713        return;
714    }
715
716    if (!mStarted) {
717        if (OMX_ErrorNone != initEncoder()) {
718            return;
719        }
720    }
721
722    List<BufferInfo *> &inQueue = getPortQueue(0);
723    List<BufferInfo *> &outQueue = getPortQueue(1);
724
725    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
726        BufferInfo *inInfo = *inQueue.begin();
727        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
728        BufferInfo *outInfo = *outQueue.begin();
729        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
730
731        outHeader->nTimeStamp = 0;
732        outHeader->nFlags = 0;
733        outHeader->nOffset = 0;
734        outHeader->nFilledLen = 0;
735        outHeader->nOffset = 0;
736
737        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
738        uint32_t dataLength = outHeader->nAllocLen;
739
740        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
741            // 4 bytes are reserved for holding the start code 0x00000001
742            // of the sequence parameter set at the beginning.
743            outPtr += 4;
744            dataLength -= 4;
745        }
746
747        int32_t type;
748        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;
749
750        // Combine SPS and PPS and place them in the very first output buffer
751        // SPS and PPS are separated by start code 0x00000001
752        // Assume that we have exactly one SPS and exactly one PPS.
753        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
754            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
755            if (encoderStatus == AVCENC_WRONG_STATE) {
756                mSpsPpsHeaderReceived = true;
757                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
758                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
759                outQueue.erase(outQueue.begin());
760                outInfo->mOwnedByUs = false;
761                notifyFillBufferDone(outHeader);
762                return;
763            } else {
764                switch (type) {
765                    case AVC_NALTYPE_SPS:
766                        ++mNumInputFrames;
767                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
768                        outHeader->nFilledLen = 4 + dataLength;
769                        outPtr += (dataLength + 4);  // 4 bytes for next start code
770                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
771                        break;
772                    default:
773                        CHECK_EQ(AVC_NALTYPE_PPS, type);
774                        ++mNumInputFrames;
775                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
776                                "\x00\x00\x00\x01", 4);
777                        outHeader->nFilledLen += (dataLength + 4);
778                        outPtr += (dataLength + 4);
779                        break;
780                }
781            }
782        }
783
784        buffer_handle_t srcBuffer; // for MetaDataMode only
785
786        // Get next input video frame
787        if (mReadyForNextFrame) {
788            // Save the input buffer info so that it can be
789            // passed to an output buffer
790            InputBufferInfo info;
791            info.mTimeUs = inHeader->nTimeStamp;
792            info.mFlags = inHeader->nFlags;
793            mInputBufferInfoVec.push(info);
794            mPrevTimestampUs = inHeader->nTimeStamp;
795
796            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
797                mSawInputEOS = true;
798            }
799
800            if (inHeader->nFilledLen > 0) {
801                AVCFrameIO videoInput;
802                memset(&videoInput, 0, sizeof(videoInput));
803                videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
804                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
805                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
806                uint8_t *inputData = NULL;
807                if (mStoreMetaDataInBuffers) {
808                    if (inHeader->nFilledLen != 8) {
809                        ALOGE("MetaData buffer is wrong size! "
810                                "(got %lu bytes, expected 8)", inHeader->nFilledLen);
811                        mSignalledError = true;
812                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
813                        return;
814                    }
815                    inputData =
816                            extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
817                                    &srcBuffer);
818                    if (inputData == NULL) {
819                        ALOGE("Unable to extract gralloc buffer in metadata mode");
820                        mSignalledError = true;
821                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
822                        return;
823                    }
824                    // TODO: Verify/convert pixel format enum
825                } else {
826                    inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
827                }
828
829                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
830                    ConvertYUV420SemiPlanarToYUV420Planar(
831                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
832                    inputData = mInputFrameData;
833                }
834                CHECK(inputData != NULL);
835                videoInput.YCbCr[0] = inputData;
836                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
837                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
838                    ((videoInput.height * videoInput.pitch) >> 2);
839                videoInput.disp_order = mNumInputFrames;
840
841                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
842                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
843                    mReadyForNextFrame = false;
844                    ++mNumInputFrames;
845                    if (encoderStatus == AVCENC_NEW_IDR) {
846                        mIsIDRFrame = 1;
847                    }
848                } else {
849                    if (encoderStatus < AVCENC_SUCCESS) {
850                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
851                        mSignalledError = true;
852                        releaseGrallocData(srcBuffer);
853                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
854                        return;
855                    } else {
856                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
857                        inQueue.erase(inQueue.begin());
858                        inInfo->mOwnedByUs = false;
859                        releaseGrallocData(srcBuffer);
860                        notifyEmptyBufferDone(inHeader);
861                        return;
862                    }
863                }
864            }
865        }
866
867        // Encode an input video frame
868        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
869        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
870        if (inHeader->nFilledLen > 0) {
871            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
872            if (encoderStatus == AVCENC_SUCCESS) {
873                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
874            } else if (encoderStatus == AVCENC_PICTURE_READY) {
875                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
876                if (mIsIDRFrame) {
877                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
878                    mIsIDRFrame = false;
879                }
880                mReadyForNextFrame = true;
881                AVCFrameIO recon;
882                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
883                    PVAVCEncReleaseRecon(mHandle, &recon);
884                }
885            } else {
886                dataLength = 0;
887                mReadyForNextFrame = true;
888            }
889
890            if (encoderStatus < AVCENC_SUCCESS) {
891                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
892                mSignalledError = true;
893                releaseGrallocData(srcBuffer);
894                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
895                return;
896            }
897        } else {
898            dataLength = 0;
899        }
900
901        inQueue.erase(inQueue.begin());
902        inInfo->mOwnedByUs = false;
903        releaseGrallocData(srcBuffer);
904        notifyEmptyBufferDone(inHeader);
905
906        outQueue.erase(outQueue.begin());
907        CHECK(!mInputBufferInfoVec.empty());
908        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
909        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
910        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
911        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
912        outHeader->nFilledLen = dataLength;
913        outInfo->mOwnedByUs = false;
914        notifyFillBufferDone(outHeader);
915    }
916}
917
918int32_t SoftAVCEncoder::allocOutputBuffers(
919        unsigned int sizeInMbs, unsigned int numBuffers) {
920    CHECK(mOutputBuffers.isEmpty());
921    size_t frameSize = (sizeInMbs << 7) * 3;
922    for (unsigned int i = 0; i <  numBuffers; ++i) {
923        MediaBuffer *buffer = new MediaBuffer(frameSize);
924        buffer->setObserver(this);
925        mOutputBuffers.push(buffer);
926    }
927
928    return 1;
929}
930
931void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
932    CHECK(index >= 0);
933}
934
935int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
936    CHECK(index >= 0);
937    CHECK(index < (int32_t) mOutputBuffers.size());
938    *yuv = (uint8_t *) mOutputBuffers[index]->data();
939
940    return 1;
941}
942
// MediaBufferObserver callback: called when a MediaBuffer we allocated in
// allocOutputBuffers() is released back to us. No bookkeeping is needed
// here; the buffer stays owned by mOutputBuffers, so we only log.
void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer);
}
946
947OMX_ERRORTYPE SoftAVCEncoder::getExtensionIndex(
948        const char *name, OMX_INDEXTYPE *index) {
949    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
950        *(int32_t*)index = kStoreMetaDataExtensionIndex;
951        return OMX_ErrorNone;
952    }
953    return OMX_ErrorUndefined;
954}
955
956uint8_t *SoftAVCEncoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
957    OMX_U32 type = *(OMX_U32*)data;
958    status_t res;
959    if (type != kMetadataBufferTypeGrallocSource) {
960        ALOGE("Data passed in with metadata mode does not have type "
961                "kMetadataBufferTypeGrallocSource (%d), has type %ld instead",
962                kMetadataBufferTypeGrallocSource, type);
963        return NULL;
964    }
965    buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
966
967    const Rect rect(mVideoWidth, mVideoHeight);
968    uint8_t *img;
969    res = GraphicBufferMapper::get().lock(imgBuffer,
970            GRALLOC_USAGE_HW_VIDEO_ENCODER,
971            rect, (void**)&img);
972    if (res != OK) {
973        ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
974                imgBuffer);
975        return NULL;
976    }
977
978    *buffer = imgBuffer;
979    return img;
980}
981
982void SoftAVCEncoder::releaseGrallocData(buffer_handle_t buffer) {
983    if (mStoreMetaDataInBuffers) {
984        GraphicBufferMapper::get().unlock(buffer);
985    }
986}
987
988}  // namespace android
989
990android::SoftOMXComponent *createSoftOMXComponent(
991        const char *name, const OMX_CALLBACKTYPE *callbacks,
992        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
993    return new android::SoftAVCEncoder(name, callbacks, appData, component);
994}
995