/*
 * Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "PVSoftMPEG4Encoder"
#include <wrs_omxil_core/log.h>

#include "mp4enc_api.h"
#include "OMX_Video.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

#include <ui/GraphicBufferMapper.h>
#include <ui/Rect.h>

#include "PVSoftMPEG4Encoder.h"
#include "VideoEncoderLog.h"

#define ALIGN(x, align)                  (((x) + (align) - 1) & (~((align) - 1)))

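// Convert an NV12 (YUV420 semi-planar) frame into I420 (YUV420 planar):
// the Y plane is copied as-is and the interleaved UV plane is split into
// separate Cb and Cr planes. The chroma plane is read four bytes (two UV
// pairs) at a time, so width is expected to be a multiple of 4.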
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint32_t *outy =  (uint32_t *) outyuv;
    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outy, inyuv, outYsize);

    /* U & V copying */
    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inyuv_4++;
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            // De-interleave: U samples go to the Cb plane, V samples to the Cr plane
            *outcb++ = tempU;
            *outcr++ = tempV;
        }
    }
}

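// Copy the visible width x height region out of a padded NV12 surface whose
// luma plane uses the given stride and whose chroma plane starts at
// stride * alignedHeight, producing a tightly packed NV12 buffer in dataOut.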
inline static void trimBuffer(uint8_t *dataIn, uint8_t *dataOut,
        int32_t width, int32_t height,
        int32_t alignedHeight, int32_t stride) {
    int32_t h;
    uint8_t *y_start, *uv_start, *_y_start, *_uv_start;
    y_start = dataOut;
    uv_start = dataOut + width * height;
    _y_start = dataIn;
    _uv_start = dataIn + stride * alignedHeight;

    for (h = 0; h < height; h++)
        memcpy(y_start + h * width, _y_start + h * stride, width);
    for (h = 0; h < height / 2; h++)
        memcpy(uv_start + h * width,
                _uv_start + h * stride, width);
}

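// Expected call sequence (a sketch inferred from this class's methods, not a
// contract defined here):
//
//   PVSoftMPEG4Encoder enc("OMX.google.mpeg4.encoder");
//   enc.setParameters(&params);          // VideoParamsCommon, before initEncoder()
//   enc.initEncoder();
//   enc.encode(&inBuffer, timeout);      // per input frame
//   enc.getOutput(&outBuffer, timeout);  // VOL header first, then encoded frames
//   enc.releaseEncoder();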
PVSoftMPEG4Encoder::PVSoftMPEG4Encoder(const char *name)
    : mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
      mVideoWidth(176),
      mVideoHeight(144),
      mVideoFrameRate(30),
      mVideoBitRate(192000),
      mVideoColorFormat(OMX_COLOR_FormatYUV420SemiPlanar),
      mStoreMetaDataInBuffers(false),
      mIDRFrameRefreshIntervalInSec(1),
      mNumInputFrames(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagvideoEncControls),
      mEncParams(new tagvideoEncOptions),
      mInputFrameData(NULL)
{
    if (!strcmp(name, "OMX.google.h263.encoder")) {
        mEncodeMode = H263_MODE;
        LOG_I("construct h263 encoder");
    } else {
        CHECK(!strcmp(name, "OMX.google.mpeg4.encoder"));
        LOG_I("construct mpeg4 encoder");
    }

    setDefaultParams();
#if NO_BUFFER_SHARE
    mVASurfaceMappingAction |= MAPACT_COPY;
#endif

    LOG_I("Construct PVSoftMPEG4Encoder");
}

PVSoftMPEG4Encoder::~PVSoftMPEG4Encoder() {
    LOG_I("Destruct PVSoftMPEG4Encoder");
    releaseEncoder();
}

void PVSoftMPEG4Encoder::setDefaultParams() {

    // Set default values for the input parameters
    mComParams.profile = VAProfileH264Baseline;
    mComParams.level = 41;
    mComParams.rawFormat = RAW_FORMAT_NV12;
    mComParams.frameRate.frameRateNum = 30;
    mComParams.frameRate.frameRateDenom = 1;
    mComParams.resolution.width = 0;
    mComParams.resolution.height = 0;
    mComParams.intraPeriod = 30;
    mComParams.rcMode = RATE_CONTROL_NONE;
    mComParams.rcParams.initQP = 15;
    mComParams.rcParams.minQP = 0;
    mComParams.rcParams.bitRate = 640000;
    mComParams.rcParams.targetPercentage = 0;
    mComParams.rcParams.windowSize = 0;
    mComParams.rcParams.disableFrameSkip = 0;
    mComParams.rcParams.disableBitsStuffing = 1;
    mComParams.cyclicFrameInterval = 30;
    mComParams.refreshType = VIDEO_ENC_NONIR;
    mComParams.airParams.airMBs = 0;
    mComParams.airParams.airThreshold = 0;
    mComParams.airParams.airAuto = 1;
    mComParams.disableDeblocking = 2;
    mComParams.syncEncMode = false;
    mComParams.codedBufNum = 2;
}

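// Translate the generic encoder settings into PV (PacketVideo) encoder options:
// resolution, frame rate, bit rate (capped at 2 Mbps), rate control, and the
// IDR refresh period, and allocate the intermediate input buffers.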
Encode_Status PVSoftMPEG4Encoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagvideoEncControls));

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(tagvideoEncOptions));
    if (!PVGetDefaultEncOption(mEncParams, 0)) {
        LOG_E("Failed to get default encoding parameters");
        return ENCODE_FAIL;
    }
    mEncParams->encMode = mEncodeMode;
    mEncParams->encWidth[0] = mVideoWidth;
    mEncParams->encHeight[0] = mVideoHeight;
    mEncParams->encFrameRate[0] = mVideoFrameRate;
    mEncParams->rcType = VBR_1;
    mEncParams->vbvDelay = 5.0f;

    // FIXME:
    // Add more profile and level support for MPEG4 encoder
    mEncParams->profile_level = CORE_PROFILE_LEVEL2;
    mEncParams->packetSize = 32;
    mEncParams->rvlcEnable = PV_OFF;
    mEncParams->numLayers = 1;
    mEncParams->timeIncRes = 1000;
    mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate;

    mEncParams->bitRate[0] = mVideoBitRate <= 2000000 ? mVideoBitRate : 2000000;
    mEncParams->iQuant[0] = 15;
    mEncParams->pQuant[0] = 12;
    mEncParams->quantType[0] = 0;
    mEncParams->noFrameSkipped = PV_OFF;

    mTrimedInputData =
        (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
    CHECK(mTrimedInputData != NULL);

    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        // Color conversion is needed.
        CHECK(mInputFrameData == NULL);
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's MPEG4 encoder requires the video dimensions to be a multiple of 16
    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        LOG_E("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return ENCODE_INVALID_PARAMS;
    }

    // Set IDR frame refresh interval
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->intraPeriod = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->intraPeriod = 1;  // All I frames
    } else {
        mEncParams->intraPeriod =
            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
    }

    mEncParams->numIntraMB = 0;
    mEncParams->sceneDetect = PV_ON;
    mEncParams->searchRange = 16;
    mEncParams->mv8x8Enable = PV_OFF;
    mEncParams->gobHeaderInterval = 0;
    mEncParams->useACPred = PV_ON;
    mEncParams->intraDCVlcTh = 0;

    return ENCODE_SUCCESS;
}

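// Initialize the PV encoder with the options prepared by initEncParams().
// The first output buffer is reserved for codec-specific data (the VOL
// header), which is why mNumInputFrames starts at -1.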
Encode_Status PVSoftMPEG4Encoder::initEncoder() {
    LOG_V("Begin\n");

    CHECK(!mStarted);

    Encode_Status ret = ENCODE_SUCCESS;
    if (ENCODE_SUCCESS != (ret = initEncParams())) {
        LOG_E("Failed to initialize encoder params");
        mSignalledError = true;
        return ret;
    }

    if (!PVInitVideoEncoder(mHandle, mEncParams)) {
        LOG_E("Failed to initialize the encoder");
        mSignalledError = true;
        return ENCODE_FAIL;
    }

    mNumInputFrames = -1;  // 1st buffer for codec specific data
    mStarted = true;
    mCurTimestampUs = 0;
    mLastTimestampUs = 0;
    mVolHeaderLength = 256;

    LOG_V("End\n");

    return ENCODE_SUCCESS;
}

Encode_Status PVSoftMPEG4Encoder::releaseEncoder() {
    LOG_V("Begin\n");

    if (!mStarted) {
        return ENCODE_SUCCESS;
    }

    PVCleanUpVideoEncoder(mHandle);

    // mTrimedInputData and mInputFrameData are allocated with malloc(),
    // so release them with free() rather than delete.
    free(mTrimedInputData);
    mTrimedInputData = NULL;

    free(mInputFrameData);
    mInputFrameData = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    LOG_V("End\n");

    return ENCODE_SUCCESS;
}

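// setParameters() only succeeds before initEncoder(); once the encoder has
// started, configuration changes have to go through setConfig() instead.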
Encode_Status PVSoftMPEG4Encoder::setParameters(
        VideoParamConfigSet *videoEncParams)
{
    Encode_Status ret = ENCODE_SUCCESS;
    CHECK_NULL_RETURN_IFFAIL(videoEncParams);
    LOG_I("Config type = %d\n", (int)videoEncParams->type);

    if (mStarted) {
        LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
        return ENCODE_ALREADY_INIT;
    }

    switch (videoEncParams->type) {
        case VideoParamsTypeCommon: {
            VideoParamsCommon *paramsCommon =
                    reinterpret_cast<VideoParamsCommon *>(videoEncParams);
            if (paramsCommon->size != sizeof(VideoParamsCommon)) {
                return ENCODE_INVALID_PARAMS;
            }
            if (paramsCommon->codedBufNum < 2)
                paramsCommon->codedBufNum = 2;
            mComParams = *paramsCommon;

            mVideoWidth = mComParams.resolution.width;
            mVideoHeight = mComParams.resolution.height;
            mVideoFrameRate = mComParams.frameRate.frameRateNum /
                              mComParams.frameRate.frameRateDenom;
            mVideoBitRate = mComParams.rcParams.bitRate;
            mVideoColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
            break;
        }

        case VideoParamsTypeStoreMetaDataInBuffers: {
            VideoParamsStoreMetaDataInBuffers *metadata =
                    reinterpret_cast<VideoParamsStoreMetaDataInBuffers *>(videoEncParams);

            if (metadata->size != sizeof(VideoParamsStoreMetaDataInBuffers)) {
                return ENCODE_INVALID_PARAMS;
            }

            mStoreMetaDataInBuffers = metadata->isEnabled;

            break;
        }

        default: {
            LOG_I("Wrong ParamType here\n");
            break;
        }
    }

    return ret;
}

Encode_Status PVSoftMPEG4Encoder::getParameters(
        VideoParamConfigSet *videoEncParams) {

    Encode_Status ret = ENCODE_SUCCESS;
    CHECK_NULL_RETURN_IFFAIL(videoEncParams);
    LOG_I("Config type = %d\n", (int)videoEncParams->type);

    switch (videoEncParams->type) {
        case VideoParamsTypeCommon: {
            VideoParamsCommon *paramsCommon =
                    reinterpret_cast<VideoParamsCommon *>(videoEncParams);

            if (paramsCommon->size != sizeof(VideoParamsCommon)) {
                return ENCODE_INVALID_PARAMS;
            }
            *paramsCommon = mComParams;
            break;
        }

        case VideoParamsTypeStoreMetaDataInBuffers: {
            VideoParamsStoreMetaDataInBuffers *metadata =
                    reinterpret_cast<VideoParamsStoreMetaDataInBuffers *>(videoEncParams);

            if (metadata->size != sizeof(VideoParamsStoreMetaDataInBuffers)) {
                return ENCODE_INVALID_PARAMS;
            }

            metadata->isEnabled = mStoreMetaDataInBuffers;

            break;
        }

        default: {
            LOG_I("Wrong ParamType here\n");
            break;
        }
    }

    return ret;
}

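// Stage one of frame submission: copy (and if necessary trim/de-tile) the
// caller's input into mTrimedInputData, then convert it to the planar layout
// the PV encoder consumes in mInputFrameData. The actual encode happens in
// getOutput().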
Encode_Status PVSoftMPEG4Encoder::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout)
{
    LOG_V("Begin\n");

    Encode_Status ret = ENCODE_SUCCESS;

    if (mCurTimestampUs <= inBuffer->timeStamp) {
        mLastTimestampUs = mCurTimestampUs;
        mCurTimestampUs = inBuffer->timeStamp;
    }

    if (mNumInputFrames < 0) {
        if (!PVGetVolHeader(mHandle, mVolHeader, &mVolHeaderLength, 0)) {
            LOG_E("Failed to get VOL header");
            mSignalledError = true;
            return ENCODE_FAIL;
        }
        LOG_I("Output VOL header: %d bytes", mVolHeaderLength);
        mNumInputFrames++;
        //return ENCODE_SUCCESS;
    }

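    // Metadata mode: the input buffer carries an IntelMetadataBuffer that either
    // references a gralloc handle (locked below to obtain a CPU pointer) or
    // provides a raw pointer plus its surface layout in a ValueInfo. In both
    // cases the padded source surface is trimmed into packed NV12.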
    if (mStoreMetaDataInBuffers) {
        IntelMetadataBuffer imb;
        int32_t type;
        int32_t value;
        uint8_t *img;
        const android::Rect rect(mVideoWidth, mVideoHeight);
        android::status_t res;
        ValueInfo vinfo;
        ValueInfo *pvinfo = &vinfo;
        CHECK(IMB_SUCCESS == imb.UnSerialize(inBuffer->data, inBuffer->size));
        imb.GetType((::IntelMetadataBufferType&)type);
        imb.GetValue(value);
        imb.GetValueInfo(pvinfo);
        if (pvinfo == NULL) {
            res = android::GraphicBufferMapper::get().lock((buffer_handle_t)value,
                    GRALLOC_USAGE_SW_READ_MASK,
                    rect, (void**)&img);
        } else {
            img = (uint8_t*)value;
        }
        if (pvinfo != NULL) {
            trimBuffer(img, mTrimedInputData, pvinfo->width, pvinfo->height,
                    pvinfo->height, pvinfo->lumaStride);
        } else {
            // NV12 Y-TILED
            trimBuffer(img, mTrimedInputData, mVideoWidth, mVideoHeight,
                    ALIGN(mVideoHeight, 32), ALIGN(mVideoWidth, 128));
            android::GraphicBufferMapper::get().unlock((buffer_handle_t)value);
        }
    } else {
        memcpy(mTrimedInputData, inBuffer->data,
                (mVideoWidth * mVideoHeight * 3 ) >> 1);
    }

    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
        ConvertYUV420SemiPlanarToYUV420Planar(
                mTrimedInputData, mInputFrameData, mVideoWidth, mVideoHeight);
    } else {
        // Input is already planar; copy it straight into the buffer that
        // getOutput() hands to the encoder.
        memcpy(mInputFrameData, mTrimedInputData,
                (mVideoWidth * mVideoHeight * 3 ) >> 1);
    }

    LOG_V("End\n");

    return ret;
}

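// Stage two of frame submission: when the caller asks for codec data (MPEG-4
// mode only), return the cached VOL header; otherwise run PVEncodeVideoFrame()
// on mInputFrameData, flag the output as a sync frame if it is an I-frame, and
// report the encoded length.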
Encode_Status PVSoftMPEG4Encoder::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout)
{
    LOG_V("Begin\n");

    Encode_Status ret = ENCODE_SUCCESS;
    uint8_t *outPtr = outBuffer->data;
    int32_t dataLength = outBuffer->bufferSize;
    outBuffer->flag = 0;

    if ((mEncodeMode == COMBINE_MODE_WITH_ERR_RES) &&
            (outBuffer->format == OUTPUT_CODEC_DATA)) {
        memcpy(outPtr, mVolHeader, mVolHeaderLength);
        ++mNumInputFrames;
        outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
        outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
        outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
        outBuffer->dataSize = mVolHeaderLength;
        outBuffer->remainingSize = 0;
        return ENCODE_SUCCESS;
    }

    outBuffer->timeStamp = mCurTimestampUs;
    LOG_I("info.mTimeUs %lld\n", outBuffer->timeStamp);

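    // The PV encoder consumes a planar frame whose luma pitch and height are
    // rounded up to multiples of 16; the U and V planes follow the Y plane
    // contiguously in mInputFrameData.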
    VideoEncFrameIO vin, vout;
    memset(&vin, 0, sizeof(vin));
    memset(&vout, 0, sizeof(vout));
    vin.height = ((mVideoHeight + 15) >> 4) << 4;
    vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
    vin.timestamp = (outBuffer->timeStamp + 500) / 1000;  // in ms
    vin.yChan = mInputFrameData;
    vin.uChan = vin.yChan + vin.height * vin.pitch;
    vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);

    unsigned long modTimeMs = 0;
    int32_t nLayer = 0;
    MP4HintTrack hintTrack;
    if (!PVEncodeVideoFrame(mHandle, &vin, &vout,
                &modTimeMs, outPtr, &dataLength, &nLayer) ||
            !PVGetHintTrack(mHandle, &hintTrack)) {
        LOG_E("Failed to encode frame or get hint track at frame %lld",
                mNumInputFrames);
        mSignalledError = true;
        hintTrack.CodeType = 0;
        ret = ENCODE_FAIL;
    }
    LOG_I("dataLength %d\n", dataLength);
    CHECK(NULL == PVGetOverrunBuffer(mHandle));
    if (hintTrack.CodeType == 0) {  // I-frame serves as sync frame
        outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
    }

    ++mNumInputFrames;

    outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
    outBuffer->dataSize = dataLength;

    LOG_V("End\n");

    return ret;
}