SoftAVC.cpp revision 84333e0475bc911adc16417f4ca327c975cf6c36
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVC"
#include <utils/Log.h>

#include "SoftAVC.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/IOMX.h>


namespace android {

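// Profile/level pairs reported to the OMX client through the
// SoftVideoDecoderOMXComponent capability query: Baseline profile only,
// at every level from 1 through 5.1.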
static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 },
};

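// The component starts with a nominal 320x240 output size; the real
// dimensions are learned from the bitstream headers and announced via a
// port-settings-changed event. Construction aborts (CHECK) if the software
// decoder library fails to initialize.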
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, "video_decoder.avc", OMX_VIDEO_CodingAVC,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mPictureSize(mWidth * mHeight * 3 / 2),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mSignalledError(false) {
    initPorts(
            kNumInputBuffers, 8192 /* inputBufferSize */,
            kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC);

    CHECK_EQ(initDecoder(), (status_t)OK);
}

SoftAVC::~SoftAVC() {
    H264SwDecRelease(mHandle);
    mHandle = NULL;

    while (mPicToHeaderMap.size() != 0) {
        OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.editValueAt(0);
        mPicToHeaderMap.removeItemsAt(0);
        delete header;
        header = NULL;
    }
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());

    delete[] mFirstPicture;
}

status_t SoftAVC::initDecoder() {
    // Force decoder to output buffers in display order.
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

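// Main work loop, invoked when buffers arrive on either port. Each outer
// iteration consumes one input access unit and then drains whatever decoded
// pictures the decoder has ready.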
void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    if (mHeadersDecoded) {
        // Dequeue any already decoded output frames to free up space
        // in the output queue.

        drainAllOutputBuffers(false /* eos */);
    }

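    // Decode only while the client has returned every output buffer
    // (outQueue.size() == kNumOutputBuffers), so each decoded picture can be
    // copied out as soon as it is ready; once EOS has been seen, keep looping
    // even without input so the decoder gets flushed.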
    H264SwDecRet ret = H264SWDEC_PIC_RDY;
    bool portSettingsChanged = false;
    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {

        if (mEOSStatus == INPUT_EOS_SEEN) {
            drainAllOutputBuffers(true /* eos */);
            return;
        }

        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        ++mPicId;

        OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
        memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
        header->nTimeStamp = inHeader->nTimeStamp;
        header->nFlags = inHeader->nFlags;
        if (header->nFlags & OMX_BUFFERFLAG_EOS) {
            mEOSStatus = INPUT_EOS_SEEN;
        }
        mPicToHeaderMap.add(mPicId, header);
        inQueue.erase(inQueue.begin());

        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;
        inPicture.picId = mPicId;
        inPicture.intraConcealmentMethod = 1;
        H264SwDecPicture decodedPicture;

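        // Feed the access unit to the decoder until every byte is consumed.
        // The *_BUFF_NOT_EMPTY return codes mean the decoder stopped early
        // (e.g. after the headers) and the remaining bytes must be
        // resubmitted from pStrmCurrPos.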
        while (inPicture.dataLen > 0) {
            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);
            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
                inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
                inPicture.pStream = outPicture.pStrmCurrPos;
                if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY) {
                    mHeadersDecoded = true;
                    H264SwDecInfo decoderInfo;
                    CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);

                    if (handlePortSettingChangeEvent(&decoderInfo)) {
                        portSettingsChanged = true;
                    }

                    if (decoderInfo.croppingFlag &&
                        handleCropRectEvent(&decoderInfo.cropParams)) {
                        portSettingsChanged = true;
                    }
                }
            } else {
                if (portSettingsChanged) {
                    if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                        == H264SWDEC_PIC_RDY) {

                        // Save this output buffer; otherwise, it will be
                        // lost during dynamic port reconfiguration because
                        // the OpenMAX client will delete _all_ output buffers
                        // in the process.
                        saveFirstOutputBuffer(
                            decodedPicture.picId,
                            (uint8_t *)decodedPicture.pOutputPicture);
                    }
                }
                inPicture.dataLen = 0;
                if (ret < 0) {
                    ALOGE("Decoder failed: %d", ret);

                    notify(OMX_EventError, OMX_ErrorUndefined,
                           ERROR_MALFORMED, NULL);

                    mSignalledError = true;
                    return;
                }
            }
        }
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        if (portSettingsChanged) {
            portSettingsChanged = false;
            return;
        }

        if (mFirstPicture && !outQueue.empty()) {
            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
            delete[] mFirstPicture;
            mFirstPicture = NULL;
            mFirstPictureId = -1;
        }

        drainAllOutputBuffers(false /* eos */);
    }
}

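// Adopt new picture dimensions reported by the decoder. mPictureSize is
// width * height * 3 / 2 bytes: one full-resolution luma plane plus two
// quarter-resolution chroma planes of the 4:2:0 output. Returns true when
// the output port definition changed, which starts the OMX dynamic port
// reconfiguration sequence (the client disables and re-enables the port).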
bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) {
    if (mWidth != info->picWidth || mHeight != info->picHeight) {
        mWidth  = info->picWidth;
        mHeight = info->picHeight;
        mPictureSize = mWidth * mHeight * 3 / 2;
        updatePortDefinitions();
        notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
        mOutputPortSettingsChange = AWAITING_DISABLED;
        return true;
    }

    return false;
}

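// Propagate a changed crop rectangle (the visible region inside the coded
// picture) to the client via OMX_IndexConfigCommonOutputCrop. Unlike a size
// change, this does not put the output port into the AWAITING_DISABLED state,
// so no buffer re-allocation is forced.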
bool SoftAVC::handleCropRectEvent(const CropParams *crop) {
    if (mCropLeft != crop->cropLeftOffset ||
        mCropTop != crop->cropTopOffset ||
        mCropWidth != crop->cropOutWidth ||
        mCropHeight != crop->cropOutHeight) {
        mCropLeft = crop->cropLeftOffset;
        mCropTop = crop->cropTopOffset;
        mCropWidth = crop->cropOutWidth;
        mCropHeight = crop->cropOutHeight;

        notify(OMX_EventPortSettingsChanged, 1,
                OMX_IndexConfigCommonOutputCrop, NULL);

        return true;
    }
    return false;
}

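// Keep a private copy of the first picture decoded after a port
// reconfiguration: the client tears down every output buffer during the
// reconfiguration, so the picture would otherwise be lost (see the comment
// in onQueueFilled above).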
void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
    CHECK(mFirstPicture == NULL);
    mFirstPictureId = picId;

    mFirstPicture = new uint8_t[mPictureSize];
    memcpy(mFirstPicture, data, mPictureSize);
}

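// Copy one decoded picture into the next free output buffer and hand it to
// the client, restoring the timestamp and flags stashed per picId when the
// matching input buffer arrived. Assumes the caller verified that the output
// queue is non-empty and that the negotiated output buffers hold at least
// mPictureSize bytes.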
void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    BufferInfo *outInfo = *outQueue.begin();
    outQueue.erase(outQueue.begin());
    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
    OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
    outHeader->nTimeStamp = header->nTimeStamp;
    outHeader->nFlags = header->nFlags;
    outHeader->nFilledLen = mPictureSize;
    memcpy(outHeader->pBuffer + outHeader->nOffset,
            data, mPictureSize);
    mPicToHeaderMap.removeItem(picId);
    delete header;
    outInfo->mOwnedByUs = false;
    notifyFillBufferDone(outHeader);
}

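// Return decoded pictures to the client, in display order, for as long as
// free output buffers remain. With eos set, the decoder is flushed and any
// leftover output buffers are sent back empty with OMX_BUFFERFLAG_EOS.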
void SoftAVC::drainAllOutputBuffers(bool eos) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    H264SwDecPicture decodedPicture;

    if (mHeadersDecoded) {
        while (!outQueue.empty()
                && H264SWDEC_PIC_RDY == H264SwDecNextPicture(
                    mHandle, &decodedPicture, eos /* flush */)) {
            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }
    }

    if (!eos) {
        return;
    }

    while (!outQueue.empty()) {
        BufferInfo *outInfo = *outQueue.begin();
        outQueue.erase(outQueue.begin());
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFilledLen = 0;
        outHeader->nFlags = OMX_BUFFERFLAG_EOS;

        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);

        mEOSStatus = OUTPUT_FRAMES_FLUSHED;
    }
}

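// A completed flush of the input port re-arms EOS tracking so decoding can
// resume with fresh input.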
void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
    if (portIndex == kInputPortIndex) {
        mEOSStatus = INPUT_DATA_AVAILABLE;
    }
}

void SoftAVC::onReset() {
    SoftVideoDecoderOMXComponent::onReset();
    mSignalledError = false;
}

}  // namespace android

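// Factory entry point looked up by the stagefright software-codec plugin
// when it loads this component's shared library.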
android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}