/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVC"
#include <utils/Log.h>

#include "SoftAVC.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/IOMX.h>


namespace android {

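// Profile/level pairs this software decoder reports as supported:
// Baseline profile only, from Level 1 up to Level 5.1.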
static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 },
};

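// The component starts with a default 320x240 port configuration; the real
// stream dimensions are adopted later through a port settings change once the
// sequence headers have been decoded.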
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, "video_decoder.avc", OMX_VIDEO_CodingAVC,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mSignalledError(false) {
    initPorts(
            kNumInputBuffers, 8192 /* inputBufferSize */,
            kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC);

    CHECK_EQ(initDecoder(), (status_t)OK);
}

SoftAVC::~SoftAVC() {
    H264SwDecRelease(mHandle);
    mHandle = NULL;

    while (mPicToHeaderMap.size() != 0) {
        OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.editValueAt(0);
        mPicToHeaderMap.removeItemsAt(0);
        delete header;
        header = NULL;
    }
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());

    delete[] mFirstPicture;
}

status_t SoftAVC::initDecoder() {
    // Force decoder to output buffers in display order.
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

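// Main decode loop. Each input buffer is assigned a picture id and a copy of
// its buffer header so that the timestamp and flags can be reattached to the
// matching decoded picture when it is output (decode order may differ from
// display order). Decoding only proceeds while the component owns all of its
// output buffers (outQueue.size() == kNumOutputBuffers).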
void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    if (mHeadersDecoded) {
        // Dequeue any already decoded output frames to free up space
        // in the output queue.

        drainAllOutputBuffers(false /* eos */);
    }

    H264SwDecRet ret = H264SWDEC_PIC_RDY;
    bool portWillReset = false;
    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {

        if (mEOSStatus == INPUT_EOS_SEEN) {
            drainAllOutputBuffers(true /* eos */);
            return;
        }

        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        ++mPicId;

        OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
        memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
        header->nTimeStamp = inHeader->nTimeStamp;
        header->nFlags = inHeader->nFlags;
        if (header->nFlags & OMX_BUFFERFLAG_EOS) {
            mEOSStatus = INPUT_EOS_SEEN;
        }
        mPicToHeaderMap.add(mPicId, header);
        inQueue.erase(inQueue.begin());

        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;
        inPicture.picId = mPicId;
        inPicture.intraConcealmentMethod = 1;
        H264SwDecPicture decodedPicture;

        while (inPicture.dataLen > 0) {
            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);
            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
                inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
                inPicture.pStream = outPicture.pStrmCurrPos;
                if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY) {
                    mHeadersDecoded = true;
                    H264SwDecInfo decoderInfo;
                    CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);

                    bool cropChanged = handleCropChange(decoderInfo);
                    handlePortSettingsChange(
                            &portWillReset, decoderInfo.picWidth, decoderInfo.picHeight,
                            cropChanged);
                }
            } else {
                if (portWillReset) {
                    if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                        == H264SWDEC_PIC_RDY) {

                        // Save this output buffer; otherwise, it will be
                        // lost during dynamic port reconfiguration because
                        // OpenMAX client will delete _all_ output buffers
                        // in the process.
                        saveFirstOutputBuffer(
                            decodedPicture.picId,
                            (uint8_t *)decodedPicture.pOutputPicture);
                    }
                }
                inPicture.dataLen = 0;
                if (ret < 0) {
                    ALOGE("Decoder failed: %d", ret);

                    notify(OMX_EventError, OMX_ErrorUndefined,
                           ERROR_MALFORMED, NULL);

                    mSignalledError = true;
                    return;
                }
            }
        }
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        if (portWillReset) {
            return;
        }

        if (mFirstPicture && !outQueue.empty()) {
            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
            delete[] mFirstPicture;
            mFirstPicture = NULL;
            mFirstPictureId = -1;
        }

        drainAllOutputBuffers(false /* eos */);
    }
}

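// Compares the crop rectangle reported by the decoder against the cached
// values and updates the cache if they differ. Returns true when the crop
// window changed, which is passed on to handlePortSettingsChange() above.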
bool SoftAVC::handleCropChange(const H264SwDecInfo& decInfo) {
    if (!decInfo.croppingFlag) {
        return false;
    }

    const CropParams& crop = decInfo.cropParams;
    if (mCropLeft == crop.cropLeftOffset &&
        mCropTop == crop.cropTopOffset &&
        mCropWidth == crop.cropOutWidth &&
        mCropHeight == crop.cropOutHeight) {
        return false;
    }

    mCropLeft = crop.cropLeftOffset;
    mCropTop = crop.cropTopOffset;
    mCropWidth = crop.cropOutWidth;
    mCropHeight = crop.cropOutHeight;
    return true;
}

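// Stashes a copy of the decoded picture that becomes ready while a port reset
// is pending. The copy is a full YUV 4:2:0 frame (width * height * 3/2 bytes)
// and is replayed through drainOneOutputBuffer() on a later pass through the
// decode loop, after the client has re-allocated the output buffers.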
void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
    CHECK(mFirstPicture == NULL);
    mFirstPictureId = picId;

    uint32_t pictureSize = mWidth * mHeight * 3 / 2;
    mFirstPicture = new uint8_t[pictureSize];
    memcpy(mFirstPicture, data, pictureSize);
}

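// Copies one decoded picture (planar YUV 4:2:0, Y plane followed by U and V)
// into the next free output buffer, restoring the timestamp and flags that
// were captured from the corresponding input buffer header.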
void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    BufferInfo *outInfo = *outQueue.begin();
    outQueue.erase(outQueue.begin());
    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
    OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
    outHeader->nTimeStamp = header->nTimeStamp;
    outHeader->nFlags = header->nFlags;
    outHeader->nFilledLen = mWidth * mHeight * 3 / 2;

    uint8_t *dst = outHeader->pBuffer + outHeader->nOffset;
    const uint8_t *srcY = data;
    const uint8_t *srcU = srcY + mWidth * mHeight;
    const uint8_t *srcV = srcU + mWidth * mHeight / 4;
    size_t srcYStride = mWidth;
    size_t srcUStride = mWidth / 2;
    size_t srcVStride = srcUStride;
    copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);

    mPicToHeaderMap.removeItem(picId);
    delete header;
    outInfo->mOwnedByUs = false;
    notifyFillBufferDone(outHeader);
}

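// Pulls every picture the decoder currently has ready into the available
// output buffers. When eos is true the decoder is flushed and any remaining
// output buffers are returned with zero length and the EOS flag set.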
void SoftAVC::drainAllOutputBuffers(bool eos) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    H264SwDecPicture decodedPicture;

    if (mHeadersDecoded) {
        while (!outQueue.empty()
                && H264SWDEC_PIC_RDY == H264SwDecNextPicture(
                    mHandle, &decodedPicture, eos /* flush */)) {
            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }
    }

    if (!eos) {
        return;
    }

    while (!outQueue.empty()) {
        BufferInfo *outInfo = *outQueue.begin();
        outQueue.erase(outQueue.begin());
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFilledLen = 0;
        outHeader->nFlags = OMX_BUFFERFLAG_EOS;

        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);

        mEOSStatus = OUTPUT_FRAMES_FLUSHED;
    }
}

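// A completed flush on the input port rearms the EOS state so that decoding
// can resume with fresh input (typically after a seek).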
void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
    if (portIndex == kInputPortIndex) {
        mEOSStatus = INPUT_DATA_AVAILABLE;
    }
}

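// Clears the sticky error flag in addition to the base-class reset handling.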
void SoftAVC::onReset() {
    SoftVideoDecoderOMXComponent::onReset();
    mSignalledError = false;
}

}  // namespace android

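// Factory entry point resolved by the stagefright software OMX plugin when it
// loads this component library.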
android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}