SoftAVC.cpp revision a0940a569f2bc24b00dc10ce0fa7658b1dc3a3a5
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVC"
#include <utils/Log.h>

#include "SoftAVC.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/IOMX.h>


namespace android {

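// Profile/level pairs advertised to the OMX client: this software decoder
// reports Baseline profile only, at levels 1 through 5.1.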
static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 },
};

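// The component starts out with a nominal 320x240 port configuration; the real
// dimensions are picked up from the bitstream once the stream headers have
// been decoded and the resulting port settings change has been handled.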
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, "video_decoder.avc", OMX_VIDEO_CodingAVC,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mSignalledError(false) {
    const size_t kMinCompressionRatio = 2;
    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
    initPorts(
            kNumInputBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* minInputBufferSize */,
            kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC, kMinCompressionRatio);

    CHECK_EQ(initDecoder(), (status_t)OK);
}

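// Releases the decoder instance and any saved buffer headers that were never
// returned to the client. Both port queues are expected to be empty by now.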
SoftAVC::~SoftAVC() {
    H264SwDecRelease(mHandle);
    mHandle = NULL;

    while (mPicToHeaderMap.size() != 0) {
        OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.editValueAt(0);
        mPicToHeaderMap.removeItemsAt(0);
        delete header;
        header = NULL;
    }
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());

    delete[] mFirstPicture;
}

status_t SoftAVC::initDecoder() {
    // Force the decoder to output pictures in display order.
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

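// Feeds queued input buffers to the decoder and moves decoded pictures to the
// output port. Also handles stream-header detection (which may trigger an
// output port reconfiguration), end-of-stream draining, and decode errors.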
void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    if (mHeadersDecoded) {
        // Dequeue any already decoded output frames to free up space
        // in the output queue.

        drainAllOutputBuffers(false /* eos */);
    }

    H264SwDecRet ret = H264SWDEC_PIC_RDY;
    bool portWillReset = false;
    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {

        if (mEOSStatus == INPUT_EOS_SEEN) {
            drainAllOutputBuffers(true /* eos */);
            return;
        }

        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        ++mPicId;

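        // Keep a private copy of the input header's metadata, keyed by picture
        // id, so the timestamp and flags can be reattached when this picture
        // later comes out of the decoder in display order.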
        OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
        memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
        header->nTimeStamp = inHeader->nTimeStamp;
        header->nFlags = inHeader->nFlags;
        if (header->nFlags & OMX_BUFFERFLAG_EOS) {
            mEOSStatus = INPUT_EOS_SEEN;
        }
        mPicToHeaderMap.add(mPicId, header);
        inQueue.erase(inQueue.begin());

        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;
        inPicture.picId = mPicId;
        inPicture.intraConcealmentMethod = 1;
        H264SwDecPicture decodedPicture;

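        // A single input buffer may carry more than the decoder consumes in
        // one call, so keep decoding until the whole buffer has been used up.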
        while (inPicture.dataLen > 0) {
            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);
            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
                inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
                inPicture.pStream = outPicture.pStrmCurrPos;
                if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY) {
                    mHeadersDecoded = true;
                    H264SwDecInfo decoderInfo;
                    CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);

                    SoftVideoDecoderOMXComponent::CropSettingsMode cropSettingsMode =
                        handleCropParams(decoderInfo);
                    handlePortSettingsChange(
                            &portWillReset, decoderInfo.picWidth, decoderInfo.picHeight,
                            cropSettingsMode);
                }
            } else {
                if (portWillReset) {
                    if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                        == H264SWDEC_PIC_RDY) {

                        // Save this output buffer; otherwise, it will be
                        // lost during the dynamic port reconfiguration, because
                        // the OpenMAX client deletes _all_ output buffers
                        // in the process.
                        saveFirstOutputBuffer(
                            decodedPicture.picId,
                            (uint8_t *)decodedPicture.pOutputPicture);
                    }
                }
                inPicture.dataLen = 0;
                if (ret < 0) {
                    ALOGE("Decoder failed: %d", ret);

                    notify(OMX_EventError, OMX_ErrorUndefined,
                           ERROR_MALFORMED, NULL);

                    mSignalledError = true;
                    return;
                }
            }
        }
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        if (portWillReset) {
            return;
        }

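        // If a picture was stashed away across a port reset, send it
        // downstream first so pictures keep coming out in order.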
        if (mFirstPicture && !outQueue.empty()) {
            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
            delete[] mFirstPicture;
            mFirstPicture = NULL;
            mFirstPictureId = -1;
        }

        drainAllOutputBuffers(false /* eos */);
    }
}

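// Reads the cropping rectangle from the decoder info and reports whether the
// stream has no cropping (kCropUnSet), unchanged cropping (kCropSet), or new
// crop values that were just latched (kCropChanged).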
SoftVideoDecoderOMXComponent::CropSettingsMode SoftAVC::handleCropParams(
        const H264SwDecInfo& decInfo) {
    if (!decInfo.croppingFlag) {
        return kCropUnSet;
    }

    const CropParams& crop = decInfo.cropParams;
    if (mCropLeft == crop.cropLeftOffset &&
        mCropTop == crop.cropTopOffset &&
        mCropWidth == crop.cropOutWidth &&
        mCropHeight == crop.cropOutHeight) {
        return kCropSet;
    }

    mCropLeft = crop.cropLeftOffset;
    mCropTop = crop.cropTopOffset;
    mCropWidth = crop.cropOutWidth;
    mCropHeight = crop.cropOutHeight;
    return kCropChanged;
}

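// Copies one decoded YUV 4:2:0 picture (width * height * 3 / 2 bytes) out of
// the decoder's internal memory so it survives the output buffer reallocation
// that accompanies a port reconfiguration.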
void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
    CHECK(mFirstPicture == NULL);
    mFirstPictureId = picId;

    uint32_t pictureSize = mWidth * mHeight * 3 / 2;
    mFirstPicture = new uint8_t[pictureSize];
    memcpy(mFirstPicture, data, pictureSize);
}

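// Copies one decoded picture into the next available output buffer, restores
// the timestamp and flags saved for that picture id, and returns the buffer
// to the client.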
void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    BufferInfo *outInfo = *outQueue.begin();
    outQueue.erase(outQueue.begin());
    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
    OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
    outHeader->nTimeStamp = header->nTimeStamp;
    outHeader->nFlags = header->nFlags;
    outHeader->nFilledLen = mWidth * mHeight * 3 / 2;

    uint8_t *dst = outHeader->pBuffer + outHeader->nOffset;
    const uint8_t *srcY = data;
    const uint8_t *srcU = srcY + mWidth * mHeight;
    const uint8_t *srcV = srcU + mWidth * mHeight / 4;
    size_t srcYStride = mWidth;
    size_t srcUStride = mWidth / 2;
    size_t srcVStride = srcUStride;
    copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);

    mPicToHeaderMap.removeItem(picId);
    delete header;
    outInfo->mOwnedByUs = false;
    notifyFillBufferDone(outHeader);
}

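// Moves every picture the decoder has ready into output buffers. When eos is
// set, the decoder is also flushed and any remaining output buffers are
// returned empty with the EOS flag set.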
void SoftAVC::drainAllOutputBuffers(bool eos) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    H264SwDecPicture decodedPicture;

    if (mHeadersDecoded) {
        while (!outQueue.empty()
                && H264SWDEC_PIC_RDY == H264SwDecNextPicture(
                    mHandle, &decodedPicture, eos /* flush */)) {
            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }
    }

    if (!eos) {
        return;
    }

    while (!outQueue.empty()) {
        BufferInfo *outInfo = *outQueue.begin();
        outQueue.erase(outQueue.begin());
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFilledLen = 0;
        outHeader->nFlags = OMX_BUFFERFLAG_EOS;

        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);

        mEOSStatus = OUTPUT_FRAMES_FLUSHED;
    }
}

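// A completed flush on the input port means new input will follow, so drop any
// end-of-stream state.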
void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
    if (portIndex == kInputPortIndex) {
        mEOSStatus = INPUT_DATA_AVAILABLE;
    }
}

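// Clears the sticky error flag in addition to the base-class reset so decoding
// can resume on the next input.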
void SoftAVC::onReset() {
    SoftVideoDecoderOMXComponent::onReset();
    mSignalledError = false;
}

}  // namespace android

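// Factory function the framework uses to instantiate this component.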
android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}