SoftAVC.cpp revision d5a2f55034022f2d0425fa0701894d0c4787b726
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVC"
#include <utils/Log.h>

#include "SoftAVC.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/IOMX.h>


namespace android {

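// Profile/level pairs handed to the SoftVideoDecoderOMXComponent base class;
// this software decoder only claims Baseline profile, up to Level 5.1. The
// base class reports these entries back to the client in response to
// profile/level queries.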
static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 },
};

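// Constructor: registers the component as "video_decoder.avc" with a default
// 320x240 frame size, sets up the input/output ports (8192-byte input
// buffers), and brings up the underlying H264SwDec software decoder; a
// decoder init failure is treated as fatal via CHECK_EQ.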
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, "video_decoder.avc", OMX_VIDEO_CodingAVC,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mSignalledError(false) {
    initPorts(
            kNumInputBuffers, 8192 /* inputBufferSize */,
            kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC);

    CHECK_EQ(initDecoder(), (status_t)OK);
}

SoftAVC::~SoftAVC() {
    H264SwDecRelease(mHandle);
    mHandle = NULL;

    while (mPicToHeaderMap.size() != 0) {
        OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.editValueAt(0);
        mPicToHeaderMap.removeItemsAt(0);
        delete header;
        header = NULL;
    }
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());

    delete[] mFirstPicture;
}

status_t SoftAVC::initDecoder() {
    // Force the decoder to output buffers in display order.
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

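// onQueueFilled drives the decode loop: first drain any pictures the decoder
// has already produced, then, as long as every output buffer is sitting on
// the output queue, consume input buffers. Each input buffer's timestamp and
// flags are stashed in mPicToHeaderMap keyed by picture id so they can be
// reattached to the corresponding decoded picture. A "headers ready" result
// triggers port-settings-change handling (possibly resetting the output
// port), and decode errors are reported to the client as OMX_ErrorUndefined.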
void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    if (mHeadersDecoded) {
        // Dequeue any already decoded output frames to free up space
        // in the output queue.

        drainAllOutputBuffers(false /* eos */);
    }

    H264SwDecRet ret = H264SWDEC_PIC_RDY;
    bool portWillReset = false;
    // Decode only while the client has returned every output buffer and there
    // is either input to consume or an EOS to flush out.
    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {

        if (mEOSStatus == INPUT_EOS_SEEN) {
            drainAllOutputBuffers(true /* eos */);
            return;
        }

        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        ++mPicId;

        // Remember this input buffer's timestamp and flags so they can be
        // restored on the decoded picture that carries the same picture id.
        OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
        memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
        header->nTimeStamp = inHeader->nTimeStamp;
        header->nFlags = inHeader->nFlags;
        if (header->nFlags & OMX_BUFFERFLAG_EOS) {
            mEOSStatus = INPUT_EOS_SEEN;
        }
        mPicToHeaderMap.add(mPicId, header);
        inQueue.erase(inQueue.begin());

        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;
        inPicture.picId = mPicId;
        inPicture.intraConcealmentMethod = 1;
        H264SwDecPicture decodedPicture;

        while (inPicture.dataLen > 0) {
            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);
            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
                inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
                inPicture.pStream = outPicture.pStrmCurrPos;
                if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY) {
                    mHeadersDecoded = true;
                    H264SwDecInfo decoderInfo;
                    CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);

                    SoftVideoDecoderOMXComponent::CropSettingsMode cropSettingsMode =
                        handleCropParams(decoderInfo);
                    handlePortSettingsChange(
                            &portWillReset, decoderInfo.picWidth, decoderInfo.picHeight,
                            cropSettingsMode);
                }
            } else {
                if (portWillReset) {
                    if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                        == H264SWDEC_PIC_RDY) {

                        // Save this output buffer; otherwise, it will be
                        // lost during dynamic port reconfiguration because
                        // the OpenMAX client will delete _all_ output
                        // buffers in the process.
                        saveFirstOutputBuffer(
                            decodedPicture.picId,
                            (uint8_t *)decodedPicture.pOutputPicture);
                    }
                }
                inPicture.dataLen = 0;
                if (ret < 0) {
                    ALOGE("Decoder failed: %d", ret);

                    notify(OMX_EventError, OMX_ErrorUndefined,
                           ERROR_MALFORMED, NULL);

                    mSignalledError = true;
                    return;
                }
            }
        }
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        if (portWillReset) {
            return;
        }

        if (mFirstPicture && !outQueue.empty()) {
            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
            delete[] mFirstPicture;
            mFirstPicture = NULL;
            mFirstPictureId = -1;
        }

        drainAllOutputBuffers(false /* eos */);
    }
}

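// Translates the decoder's cropping information into the base class's crop
// state: kCropUnSet when the stream carries no cropping flag, kCropSet when
// the crop rectangle is unchanged, and kCropChanged (after updating the
// cached values) when it differs from what was previously reported.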
SoftVideoDecoderOMXComponent::CropSettingsMode SoftAVC::handleCropParams(
        const H264SwDecInfo& decInfo) {
    if (!decInfo.croppingFlag) {
        return kCropUnSet;
    }

    const CropParams& crop = decInfo.cropParams;
    if (mCropLeft == crop.cropLeftOffset &&
        mCropTop == crop.cropTopOffset &&
        mCropWidth == crop.cropOutWidth &&
        mCropHeight == crop.cropOutHeight) {
        return kCropSet;
    }

    mCropLeft = crop.cropLeftOffset;
    mCropTop = crop.cropTopOffset;
    mCropWidth = crop.cropOutWidth;
    mCropHeight = crop.cropOutHeight;
    return kCropChanged;
}

void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
    CHECK(mFirstPicture == NULL);
    mFirstPictureId = picId;

    uint32_t pictureSize = mWidth * mHeight * 3 / 2;
    mFirstPicture = new uint8_t[pictureSize];
    memcpy(mFirstPicture, data, pictureSize);
}

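// Copies one decoded picture (planar YUV 4:2:0, hence width * height * 3 / 2
// bytes) into the oldest queued output buffer, restores the timestamp and
// flags saved from the matching input buffer, and hands the buffer back to
// the client via notifyFillBufferDone.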
void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    BufferInfo *outInfo = *outQueue.begin();
    outQueue.erase(outQueue.begin());
    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
    OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
    outHeader->nTimeStamp = header->nTimeStamp;
    outHeader->nFlags = header->nFlags;
    outHeader->nFilledLen = mWidth * mHeight * 3 / 2;

    uint8_t *dst = outHeader->pBuffer + outHeader->nOffset;
    const uint8_t *srcY = data;
    const uint8_t *srcU = srcY + mWidth * mHeight;
    const uint8_t *srcV = srcU + mWidth * mHeight / 4;
    size_t srcYStride = mWidth;
    size_t srcUStride = mWidth / 2;
    size_t srcVStride = srcUStride;
    copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);

    mPicToHeaderMap.removeItem(picId);
    delete header;
    outInfo->mOwnedByUs = false;
    notifyFillBufferDone(outHeader);
}

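// Pulls as many decoded pictures as the decoder will release into available
// output buffers, passing eos through as the decoder's flush flag. At EOS,
// any output buffers still queued are returned to the client empty with
// OMX_BUFFERFLAG_EOS set and the component marks all frames as flushed.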
void SoftAVC::drainAllOutputBuffers(bool eos) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    H264SwDecPicture decodedPicture;

    if (mHeadersDecoded) {
        while (!outQueue.empty()
                && H264SWDEC_PIC_RDY == H264SwDecNextPicture(
                    mHandle, &decodedPicture, eos /* flush */)) {
            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }
    }

    if (!eos) {
        return;
    }

    while (!outQueue.empty()) {
        BufferInfo *outInfo = *outQueue.begin();
        outQueue.erase(outQueue.begin());
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFilledLen = 0;
        outHeader->nFlags = OMX_BUFFERFLAG_EOS;

        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);

        mEOSStatus = OUTPUT_FRAMES_FLUSHED;
    }
}

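// A completed flush on the input port rearms EOS tracking (e.g. after a seek)
// so the decode loop in onQueueFilled will accept fresh input again.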
void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
    if (portIndex == kInputPortIndex) {
        mEOSStatus = INPUT_DATA_AVAILABLE;
    }
}

void SoftAVC::onReset() {
    SoftVideoDecoderOMXComponent::onReset();
    mSignalledError = false;
}

}  // namespace android

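// Exported factory function; the framework's software OMX plugin resolves
// this symbol to instantiate the component.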
android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}