1/*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "OMXUtils"
19
20#include <string.h>
21
22#include <media/hardware/HardwareAPI.h>
23#include <media/stagefright/foundation/ADebug.h>
24#include <media/stagefright/foundation/AUtils.h>
25#include <media/stagefright/MediaErrors.h>
26#include <media/MediaDefs.h>
27#include "OMXUtils.h"
28
29namespace android {
30
31status_t StatusFromOMXError(OMX_ERRORTYPE err) {
32    switch (err) {
33        case OMX_ErrorNone:
34            return OK;
35        case OMX_ErrorNoMore:
36            return NOT_ENOUGH_DATA;
37        case OMX_ErrorUnsupportedSetting:
38        case OMX_ErrorUnsupportedIndex:
39            return ERROR_UNSUPPORTED; // this is a media specific error
40        case OMX_ErrorBadParameter:
41            return BAD_VALUE;
42        case OMX_ErrorInsufficientResources:
43            return NO_MEMORY;
44        case OMX_ErrorInvalidComponentName:
45        case OMX_ErrorComponentNotFound:
46            return NAME_NOT_FOUND;
47        default:
48            return UNKNOWN_ERROR;
49    }
50}
51
52/**************************************************************************************************/
53
54DescribeColorFormatParams::DescribeColorFormatParams(const DescribeColorFormat2Params &params) {
55    InitOMXParams(this);
56
57    eColorFormat = params.eColorFormat;
58    nFrameWidth = params.nFrameWidth;
59    nFrameHeight = params.nFrameHeight;
60    nStride = params.nStride;
61    nSliceHeight = params.nSliceHeight;
62    bUsingNativeBuffers = params.bUsingNativeBuffers;
63    // we don't copy media images as this conversion is only used pre-query
64};
65
66void DescribeColorFormat2Params::initFromV1(const DescribeColorFormatParams &params) {
67    InitOMXParams(this);
68
69    eColorFormat = params.eColorFormat;
70    nFrameWidth = params.nFrameWidth;
71    nFrameHeight = params.nFrameHeight;
72    nStride = params.nStride;
73    nSliceHeight = params.nSliceHeight;
74    bUsingNativeBuffers = params.bUsingNativeBuffers;
75    sMediaImage.initFromV1(params.sMediaImage);
76};
77
// Convert a v1 MediaImage into this MediaImage2. Only YUV images are
// convertible; anything else (and any plane whose subsampling does not
// fit in int32_t) results in MEDIA_IMAGE_TYPE_UNKNOWN. The object is
// zeroed up front, so on any rejection path the remaining fields are 0.
void MediaImage2::initFromV1(const MediaImage &image) {
    memset(this, 0, sizeof(*this));

    // Only YUV images are supported by this conversion.
    if (image.mType != MediaImage::MEDIA_IMAGE_TYPE_YUV) {
        mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        return;
    }

    // Validate every plane before copying anything: v1 subsampling fields
    // are unsigned and wider than the int32_t fields used in v2.
    for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
        if (image.mPlane[ix].mHorizSubsampling > INT32_MAX
                || image.mPlane[ix].mVertSubsampling > INT32_MAX) {
            mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
            return;
        }
    }

    mType = (MediaImage2::Type)image.mType;
    mNumPlanes = image.mNumPlanes;
    mWidth = image.mWidth;
    mHeight = image.mHeight;
    mBitDepth = image.mBitDepth;
    // v1 has no allocated-depth field; 8 bits per component is assumed.
    mBitDepthAllocated = 8;
    for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
        mPlane[ix].mOffset = image.mPlane[ix].mOffset;
        mPlane[ix].mColInc = image.mPlane[ix].mColInc;
        mPlane[ix].mRowInc = image.mPlane[ix].mRowInc;
        // Narrowing casts are safe: values were range-checked above.
        mPlane[ix].mHorizSubsampling = (int32_t)image.mPlane[ix].mHorizSubsampling;
        mPlane[ix].mVertSubsampling = (int32_t)image.mPlane[ix].mVertSubsampling;
    }
}
108
109/**************************************************************************************************/
110
111const char *GetComponentRole(bool isEncoder, const char *mime) {
112    struct MimeToRole {
113        const char *mime;
114        const char *decoderRole;
115        const char *encoderRole;
116    };
117
118    static const MimeToRole kMimeToRole[] = {
119        { MEDIA_MIMETYPE_AUDIO_MPEG,
120            "audio_decoder.mp3", "audio_encoder.mp3" },
121        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
122            "audio_decoder.mp1", "audio_encoder.mp1" },
123        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
124            "audio_decoder.mp2", "audio_encoder.mp2" },
125        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
126            "audio_decoder.amrnb", "audio_encoder.amrnb" },
127        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
128            "audio_decoder.amrwb", "audio_encoder.amrwb" },
129        { MEDIA_MIMETYPE_AUDIO_AAC,
130            "audio_decoder.aac", "audio_encoder.aac" },
131        { MEDIA_MIMETYPE_AUDIO_VORBIS,
132            "audio_decoder.vorbis", "audio_encoder.vorbis" },
133        { MEDIA_MIMETYPE_AUDIO_OPUS,
134            "audio_decoder.opus", "audio_encoder.opus" },
135        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
136            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
137        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
138            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
139        { MEDIA_MIMETYPE_VIDEO_AVC,
140            "video_decoder.avc", "video_encoder.avc" },
141        { MEDIA_MIMETYPE_VIDEO_HEVC,
142            "video_decoder.hevc", "video_encoder.hevc" },
143        { MEDIA_MIMETYPE_VIDEO_MPEG4,
144            "video_decoder.mpeg4", "video_encoder.mpeg4" },
145        { MEDIA_MIMETYPE_VIDEO_H263,
146            "video_decoder.h263", "video_encoder.h263" },
147        { MEDIA_MIMETYPE_VIDEO_VP8,
148            "video_decoder.vp8", "video_encoder.vp8" },
149        { MEDIA_MIMETYPE_VIDEO_VP9,
150            "video_decoder.vp9", "video_encoder.vp9" },
151        { MEDIA_MIMETYPE_AUDIO_RAW,
152            "audio_decoder.raw", "audio_encoder.raw" },
153        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
154            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
155        { MEDIA_MIMETYPE_AUDIO_FLAC,
156            "audio_decoder.flac", "audio_encoder.flac" },
157        { MEDIA_MIMETYPE_AUDIO_MSGSM,
158            "audio_decoder.gsm", "audio_encoder.gsm" },
159        { MEDIA_MIMETYPE_VIDEO_MPEG2,
160            "video_decoder.mpeg2", "video_encoder.mpeg2" },
161        { MEDIA_MIMETYPE_AUDIO_AC3,
162            "audio_decoder.ac3", "audio_encoder.ac3" },
163        { MEDIA_MIMETYPE_AUDIO_EAC3,
164            "audio_decoder.eac3", "audio_encoder.eac3" },
165    };
166
167    static const size_t kNumMimeToRole =
168        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
169
170    size_t i;
171    for (i = 0; i < kNumMimeToRole; ++i) {
172        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
173            break;
174        }
175    }
176
177    if (i == kNumMimeToRole) {
178        return NULL;
179    }
180
181    return isEncoder ? kMimeToRole[i].encoderRole
182                  : kMimeToRole[i].decoderRole;
183}
184
185status_t SetComponentRole(const sp<IOMXNode> &omxNode, const char *role) {
186    OMX_PARAM_COMPONENTROLETYPE roleParams;
187    InitOMXParams(&roleParams);
188
189    strncpy((char *)roleParams.cRole,
190            role, OMX_MAX_STRINGNAME_SIZE - 1);
191
192    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
193
194    return omxNode->setParameter(
195            OMX_IndexParamStandardComponentRole,
196            &roleParams, sizeof(roleParams));
197}
198
// Fill in params.sMediaImage with the default plane layout for the
// requested color format, using only the geometry in |params|. Handles
// the standard YUV420 OMX formats plus HAL_PIXEL_FORMAT_YV12; returns
// false (leaving the image marked UNKNOWN) for anything else or for
// insane stride/slice-height values. May patch params.nSliceHeight in
// place (see the vendor workaround below).
bool DescribeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    // Start pessimistic: every early return below leaves the image UNKNOWN.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    // The Y plane layout is common to all supported formats: full
    // resolution, one byte per sample, rows nStride bytes apart.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // Native-buffer YV12: strides are aligned to 16 bytes,
                // and the V plane precedes the U plane.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // Planar: U plane directly after Y, V plane after U, each
            // chroma plane at half stride and quarter size.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            // Semi-planar: interleaved UV plane after Y; V is U's
            // layout shifted by one byte with a 2-byte column step.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // Unreachable: the format was validated above.
            TRESPASS();
    }
    return true;
}
310
311bool DescribeColorFormat(
312        const sp<IOMXNode> &omxNode,
313        DescribeColorFormat2Params &describeParams)
314{
315    OMX_INDEXTYPE describeColorFormatIndex;
316    if (omxNode->getExtensionIndex(
317            "OMX.google.android.index.describeColorFormat",
318            &describeColorFormatIndex) == OK) {
319        DescribeColorFormatParams describeParamsV1(describeParams);
320        if (omxNode->getParameter(
321                describeColorFormatIndex,
322                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
323            describeParams.initFromV1(describeParamsV1);
324            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
325        }
326    } else if (omxNode->getExtensionIndex(
327            "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
328               && omxNode->getParameter(
329                       describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
330        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
331    }
332
333    return DescribeDefaultColorFormat(describeParams);
334}
335
336// static
337bool IsFlexibleColorFormat(
338         const sp<IOMXNode> &omxNode,
339         uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
340    DescribeColorFormat2Params describeParams;
341    InitOMXParams(&describeParams);
342    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
343    // reasonable dummy values
344    describeParams.nFrameWidth = 128;
345    describeParams.nFrameHeight = 128;
346    describeParams.nStride = 128;
347    describeParams.nSliceHeight = 128;
348    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
349
350    CHECK(flexibleEquivalent != NULL);
351
352    if (!DescribeColorFormat(omxNode, describeParams)) {
353        return false;
354    }
355
356    const MediaImage2 &img = describeParams.sMediaImage;
357    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
358        if (img.mNumPlanes != 3
359                || img.mPlane[img.Y].mHorizSubsampling != 1
360                || img.mPlane[img.Y].mVertSubsampling != 1) {
361            return false;
362        }
363
364        // YUV 420
365        if (img.mPlane[img.U].mHorizSubsampling == 2
366                && img.mPlane[img.U].mVertSubsampling == 2
367                && img.mPlane[img.V].mHorizSubsampling == 2
368                && img.mPlane[img.V].mVertSubsampling == 2) {
369            // possible flexible YUV420 format
370            if (img.mBitDepth <= 8) {
371               *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
372               return true;
373            }
374        }
375    }
376    return false;
377}
378
379}  // namespace android
380
381