HardwareAPI.h revision 999083756c117b5b5bce862c296d525a2f7b94ac
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef HARDWARE_API_H_

#define HARDWARE_API_H_

#include <media/hardware/OMXPluginBase.h>
#include <media/hardware/MetadataBufferType.h>
#include <system/window.h>
#include <utils/RefBase.h>

#include <OMX_Component.h>

namespace android {

// A pointer to this struct is passed to OMX_SetParameter when the extension
// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
// is given.
//
// When Android native buffer use is disabled for a port (the default state),
// the OMX node should operate as normal, and expect UseBuffer calls to set its
// buffers.  This is the mode that will be used when CPU access to the buffer is
// required.
//
// When Android native buffer use has been enabled for a given port, the video
// color format for the port is to be interpreted as an Android pixel format
// rather than an OMX color format.  Enabling Android native buffers may also
// change how the component receives the native buffers.  If store-metadata-mode
// is enabled on the port, the component will receive the buffers as specified
// in the section below. Otherwise, unless the node supports the
// 'OMX.google.android.index.useAndroidNativeBuffer2' extension, it should
// expect to receive UseAndroidNativeBuffer calls (via OMX_SetParameter) rather
// than UseBuffer calls for that port.
struct EnableAndroidNativeBuffersParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_BOOL enable;
};
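
// Illustrative sketch only, not part of this API: one way a client might enable Android
// native buffer use on a port.  'hComponent' is assumed to be an already-loaded OMX
// component handle, and the OMX IL spec version fields are filled with assumed values.
static inline OMX_ERRORTYPE ExampleEnableAndroidNativeBuffers(
        OMX_HANDLETYPE hComponent, OMX_U32 portIndex, OMX_BOOL enable) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            hComponent,
            const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }
    EnableAndroidNativeBuffersParams params;
    params.nSize = sizeof(params);
    params.nVersion.nVersion = 0;
    params.nVersion.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    params.nPortIndex = portIndex;
    params.enable = enable;
    return OMX_SetParameter(hComponent, index, &params);
}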

// A pointer to this struct is passed to OMX_SetParameter() when the extension index
// "OMX.google.android.index.storeMetaDataInBuffers" or
// "OMX.google.android.index.storeANWBufferInMetadata" is given.
//
// When meta data is stored in the video buffers passed between OMX clients
// and OMX components, interpretation of the buffer data is up to the
// buffer receiver, and the data may or may not be the actual video data, but
// some information helpful for the receiver to locate the actual data.
// The buffer receiver thus needs to know how to interpret what is stored
// in these buffers, with mechanisms pre-determined externally. How to
// interpret the meta data is outside of the scope of this parameter.
//
// Currently, this is used to pass meta data from a video source (a camera component, for
// instance) to a video encoder to avoid memcpying of input video frame data, as well as to
// pass dynamic output buffers to a video decoder. To do this, bStoreMetaData is set to OMX_TRUE.
//
// If bStoreMetaData is set to false, real YUV frame data will be stored in input buffers, and
// the output buffers contain either real YUV frame data, or are themselves native handles as
// directed by enable/use-android-native-buffer parameter settings.
// In addition, if no OMX_SetParameter() call is made on a port with the corresponding extension
// index, the component should not assume that the client is not using metadata mode for the port.
//
// If the component supports this using the "OMX.google.android.index.storeANWBufferInMetadata"
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoNativeMetadata
// layout as defined below. Each buffer will be accompanied by a fence. The fence must signal
// before the buffer can be used (e.g. read from or written into). When returning such a buffer
// to the client, the component must provide a new fence that must signal before the returned
// buffer can be used (e.g. read from or written into). The component owns the incoming fenceFd,
// and must close it once the fence has signaled. The client will own and close the returned
// fence file descriptor.
//
// If the component supports this using the "OMX.google.android.index.storeMetaDataInBuffers"
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoGrallocMetadata
// layout defined below. Camera input can also be passed as "CameraSource", the layout of which
// is vendor dependent.
//
// Metadata buffers are registered with the component using UseBuffer calls, or can be allocated
// by the component for encoder-metadata-output buffers.
struct StoreMetaDataInBuffersParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_BOOL bStoreMetaData;
};
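
// Illustrative sketch only, not part of this API: requesting metadata mode on a port,
// trying the newer ANWBuffer-in-metadata extension first and falling back to the legacy
// gralloc-source extension.  'hComponent' and 'portIndex' are assumed caller inputs.
static inline OMX_ERRORTYPE ExampleStoreMetaDataInBuffers(
        OMX_HANDLETYPE hComponent, OMX_U32 portIndex) {
    StoreMetaDataInBuffersParams params;
    params.nSize = sizeof(params);
    params.nVersion.nVersion = 0;
    params.nVersion.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    params.nPortIndex = portIndex;
    params.bStoreMetaData = OMX_TRUE;

    const char *kExtensions[2] = {
        "OMX.google.android.index.storeANWBufferInMetadata",   // preferred (fenced) layout
        "OMX.google.android.index.storeMetaDataInBuffers",     // legacy gralloc-source layout
    };
    OMX_ERRORTYPE err = OMX_ErrorUndefined;
    for (int i = 0; i < 2; ++i) {
        OMX_INDEXTYPE index;
        err = OMX_GetExtensionIndex(
                hComponent, const_cast<OMX_STRING>(kExtensions[i]), &index);
        if (err == OMX_ErrorNone) {
            err = OMX_SetParameter(hComponent, index, &params);
        }
        if (err == OMX_ErrorNone) {
            break;   // metadata mode enabled via this extension
        }
    }
    return err;
}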

// Meta data buffer layout used to transport output frames to the decoder for
// dynamic buffer handling.
struct VideoGrallocMetadata {
    MetadataBufferType eType;               // must be kMetadataBufferTypeGrallocSource
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    OMX_PTR pHandle;
#else
    buffer_handle_t pHandle;
#endif
};

// Legacy name for VideoGrallocMetadata struct.
struct VideoDecoderOutputMetaData : public VideoGrallocMetadata {};

struct VideoNativeMetadata {
    MetadataBufferType eType;               // must be kMetadataBufferTypeANWBuffer
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    OMX_PTR pBuffer;
#else
    struct ANativeWindowBuffer* pBuffer;
#endif
    int nFenceFd;                           // -1 if unused
};
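
// Illustrative sketch only, not part of this API: how a component might interpret the payload
// of a metadata buffer delivered to one of its ports in "storeANWBufferInMetadata" mode.
// Per the comment above, the receiver takes ownership of nFenceFd and must wait for it to
// signal (e.g. with sync_wait() from libsync, not shown here) before touching the buffer.
static inline ANativeWindowBuffer *ExampleUnwrapANWBufferMetadata(
        OMX_BUFFERHEADERTYPE *header, int *outFenceFd) {
    if (header->nFilledLen < sizeof(VideoNativeMetadata)) {
        return NULL;   // not a metadata payload of the expected layout
    }
    VideoNativeMetadata *meta =
            reinterpret_cast<VideoNativeMetadata *>(header->pBuffer + header->nOffset);
    if (meta->eType != kMetadataBufferTypeANWBuffer) {
        return NULL;
    }
    *outFenceFd = meta->nFenceFd;   // -1 if no fence accompanies this buffer
    return static_cast<ANativeWindowBuffer *>(meta->pBuffer);
}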

// Meta data buffer layout for passing a native_handle to the codec
struct VideoNativeHandleMetadata {
    MetadataBufferType eType;               // must be kMetadataBufferTypeNativeHandleSource

#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    OMX_PTR pHandle;
#else
    native_handle_t *pHandle;
#endif
};

// A pointer to this struct is passed to OMX_SetParameter() when the extension
// index "OMX.google.android.index.prepareForAdaptivePlayback" is given.
//
// This method is used to signal a video decoder that the user has requested
// seamless resolution change support (if bEnable is set to OMX_TRUE).
// nMaxFrameWidth and nMaxFrameHeight are the dimensions of the largest
// anticipated frames in the video.  If bEnable is OMX_FALSE, no resolution
// change is expected, and the nMaxFrameWidth/Height fields are unused.
//
// If the decoder supports dynamic output buffers, it may ignore this
// request.  Otherwise, it shall request resources in such a way that it
// avoids full port-reconfiguration (due to output port-definition change)
// during resolution changes.
//
// DO NOT USE THIS STRUCTURE AS IT WILL BE REMOVED.  INSTEAD, IMPLEMENT
// METADATA SUPPORT FOR VIDEO DECODERS.
struct PrepareForAdaptivePlaybackParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_BOOL bEnable;
    OMX_U32 nMaxFrameWidth;
    OMX_U32 nMaxFrameHeight;
};
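
// Illustrative sketch only, not part of this API, and it exercises the legacy path that the
// note above deprecates in favor of metadata support.  It shows how a client might signal a
// decoder that seamless switches up to a given resolution are expected; 'hComponent' and the
// output port index are assumed caller inputs.
static inline OMX_ERRORTYPE ExamplePrepareForAdaptivePlayback(
        OMX_HANDLETYPE hComponent, OMX_U32 portIndex,
        OMX_U32 maxWidth, OMX_U32 maxHeight) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            hComponent,
            const_cast<OMX_STRING>("OMX.google.android.index.prepareForAdaptivePlayback"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }
    PrepareForAdaptivePlaybackParams params;
    params.nSize = sizeof(params);
    params.nVersion.nVersion = 0;
    params.nVersion.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    params.nPortIndex = portIndex;
    params.bEnable = OMX_TRUE;
    params.nMaxFrameWidth = maxWidth;
    params.nMaxFrameHeight = maxHeight;
    return OMX_SetParameter(hComponent, index, &params);
}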

// A pointer to this struct is passed to OMX_SetParameter when the extension
// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is
// given.  This call will only be performed if a prior call was made with the
// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index,
// enabling use of Android native buffers.
struct UseAndroidNativeBufferParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_PTR pAppPrivate;
    OMX_BUFFERHEADERTYPE **bufferHeader;
    const sp<ANativeWindowBuffer>& nativeBuffer;
};
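
// Illustrative sketch only, not part of this API: registering one graphic buffer with a port
// that has native buffer use enabled.  'nativeBuffer' is assumed to come from the client's
// ANativeWindow; the component fills in '*outHeader' with the resulting buffer header.
static inline OMX_ERRORTYPE ExampleUseAndroidNativeBuffer(
        OMX_HANDLETYPE hComponent, OMX_U32 portIndex,
        const sp<ANativeWindowBuffer> &nativeBuffer,
        OMX_BUFFERHEADERTYPE **outHeader) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            hComponent,
            const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }
    OMX_VERSIONTYPE version;
    version.nVersion = 0;
    version.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    // Aggregate initialization is used because nativeBuffer is a reference member.
    UseAndroidNativeBufferParams params = {
        sizeof(UseAndroidNativeBufferParams), version, portIndex,
        NULL /* pAppPrivate */, outHeader, nativeBuffer,
    };
    return OMX_SetParameter(hComponent, index, &params);
}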

// A pointer to this struct is passed to OMX_GetParameter when the extension
// index for the 'OMX.google.android.index.getAndroidNativeBufferUsage'
// extension is given.  The usage bits returned from this query will be used to
// allocate the Gralloc buffers that get passed to the useAndroidNativeBuffer
// command.
struct GetAndroidNativeBufferUsageParams {
    OMX_U32 nSize;              // IN
    OMX_VERSIONTYPE nVersion;   // IN
    OMX_U32 nPortIndex;         // IN
    OMX_U32 nUsage;             // OUT
};
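
// Illustrative sketch only, not part of this API: querying the extra gralloc usage bits a
// component needs before the client allocates buffers from its ANativeWindow.  'hComponent'
// and 'portIndex' are assumed caller inputs.
static inline OMX_ERRORTYPE ExampleGetAndroidNativeBufferUsage(
        OMX_HANDLETYPE hComponent, OMX_U32 portIndex, OMX_U32 *outUsage) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            hComponent,
            const_cast<OMX_STRING>("OMX.google.android.index.getAndroidNativeBufferUsage"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }
    GetAndroidNativeBufferUsageParams params;
    params.nSize = sizeof(params);
    params.nVersion.nVersion = 0;
    params.nVersion.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    params.nPortIndex = portIndex;
    params.nUsage = 0;
    err = OMX_GetParameter(hComponent, index, &params);
    if (err == OMX_ErrorNone) {
        *outUsage = params.nUsage;   // OR these bits into the gralloc allocation usage
    }
    return err;
}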

// The enum OMX_COLOR_FormatAndroidOpaque, declared in
// media/stagefright/openmax/OMX_IVCommon.h, indicates an opaque color format.
// It informs the encoder that the actual color format will be relayed by the
// gralloc buffers.
// OMX_COLOR_FormatAndroidOpaque  = 0x7F000001,

// A pointer to this struct is passed to OMX_SetParameter when the extension
// index for the 'OMX.google.android.index.prependSPSPPSToIDRFrames' extension
// is given.
// A successful result indicates that future IDR frames will be prefixed by
// SPS/PPS.
struct PrependSPSPPSToIDRFramesParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_BOOL bEnable;
};
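
// Illustrative sketch only, not part of this API: asking an encoder to prefix every future
// IDR frame with SPS/PPS.  'hComponent' is assumed to be a loaded video encoder handle.
static inline OMX_ERRORTYPE ExamplePrependSPSPPSToIDRFrames(OMX_HANDLETYPE hComponent) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            hComponent,
            const_cast<OMX_STRING>("OMX.google.android.index.prependSPSPPSToIDRFrames"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }
    PrependSPSPPSToIDRFramesParams params;
    params.nSize = sizeof(params);
    params.nVersion.nVersion = 0;
    params.nVersion.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    params.bEnable = OMX_TRUE;
    return OMX_SetParameter(hComponent, index, &params);
}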

// Structure describing a media image (frame)
// Currently only supporting YUV
struct MediaImage {
    enum Type {
        MEDIA_IMAGE_TYPE_UNKNOWN = 0,
        MEDIA_IMAGE_TYPE_YUV,
    };

    enum PlaneIndex {
        Y = 0,
        U,
        V,
        MAX_NUM_PLANES
    };

    Type mType;
    uint32_t mNumPlanes;              // number of planes
    uint32_t mWidth;                  // width of largest plane (unpadded, as in nFrameWidth)
    uint32_t mHeight;                 // height of largest plane (unpadded, as in nFrameHeight)
    uint32_t mBitDepth;               // usable bit depth
    struct PlaneInfo {
        uint32_t mOffset;             // offset of first pixel of the plane in bytes
                                      // from buffer offset
        uint32_t mColInc;             // column increment in bytes
        uint32_t mRowInc;             // row increment in bytes
        uint32_t mHorizSubsampling;   // subsampling compared to the largest plane
        uint32_t mVertSubsampling;    // subsampling compared to the largest plane
    };
    PlaneInfo mPlane[MAX_NUM_PLANES];
};
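
// Illustrative sketch only, not part of this API: addressing a single sample in a plane
// described by MediaImage.  'base' is assumed to be the start of the image data (the buffer
// pointer plus the buffer's offset), and x/y are coordinates in the largest (Y) plane; the
// per-plane subsampling factors map them into the chroma planes.
static inline uint8_t *ExampleMediaImageSampleAt(
        uint8_t *base, const MediaImage &img, MediaImage::PlaneIndex plane,
        uint32_t x, uint32_t y) {
    const MediaImage::PlaneInfo &info = img.mPlane[plane];
    // Subsampling is 1 for the largest plane, 2 for 4:2:0 chroma, etc.
    uint32_t px = x / info.mHorizSubsampling;
    uint32_t py = y / info.mVertSubsampling;
    return base + info.mOffset + py * info.mRowInc + px * info.mColInc;
}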

// A pointer to this struct is passed to OMX_GetParameter when the extension
// index for the 'OMX.google.android.index.describeColorFormat'
// extension is given.  This method can be called from any component state
// other than invalid.  The color-format, frame width/height, and stride/
// slice-height parameters are ones that are associated with a raw video
// port (input or output), but the stride/slice height parameters may be
// incorrect.  bUsingNativeBuffers is OMX_TRUE if native Android buffers will
// be used (while specifying this color format).
//
// The component shall fill out the MediaImage structure that
// corresponds to the described raw video format, and the potentially corrected
// stride and slice-height info.
//
// The behavior is slightly different if bUsingNativeBuffers is OMX_TRUE,
// though most implementations can ignore this difference. When using native buffers,
// the component may change the configured color format to an optimized format.
// Additionally, when allocating these buffers for the flexible use case, the framework
// will set the SW_READ/WRITE_OFTEN usage flags. In this case (if bUsingNativeBuffers
// is OMX_TRUE), the component shall fill out the MediaImage information for the
// scenario when these SW-readable/writable buffers are locked using gralloc_lock.
// Note that these buffers may also be locked using gralloc_lock_ycbcr, which must
// be supported for vendor-specific formats.
//
// For non-YUV packed planar/semiplanar image formats, or if bUsingNativeBuffers
// is OMX_TRUE and the component does not support this color format with native
// buffers, the component shall set mNumPlanes to 0, and mType to MEDIA_IMAGE_TYPE_UNKNOWN.
struct DescribeColorFormatParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    // input: parameters from OMX_VIDEO_PORTDEFINITIONTYPE
    OMX_COLOR_FORMATTYPE eColorFormat;
    OMX_U32 nFrameWidth;
    OMX_U32 nFrameHeight;
    OMX_U32 nStride;
    OMX_U32 nSliceHeight;
    OMX_BOOL bUsingNativeBuffers;

    // output: fill out the MediaImage fields
    MediaImage sMediaImage;
};
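
// Illustrative sketch only, not part of this API: asking a component to describe the plane
// layout behind a color format.  The port-definition values (format, size, stride, slice
// height) are assumed to have been read from the raw video port beforehand; error handling
// is abbreviated.
static inline OMX_ERRORTYPE ExampleDescribeColorFormat(
        OMX_HANDLETYPE hComponent, const OMX_VIDEO_PORTDEFINITIONTYPE &video,
        OMX_BOOL usingNativeBuffers, MediaImage *outImage) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            hComponent,
            const_cast<OMX_STRING>("OMX.google.android.index.describeColorFormat"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }
    DescribeColorFormatParams params;
    params.nSize = sizeof(params);
    params.nVersion.nVersion = 0;
    params.nVersion.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    params.eColorFormat = video.eColorFormat;
    params.nFrameWidth = video.nFrameWidth;
    params.nFrameHeight = video.nFrameHeight;
    params.nStride = video.nStride;
    params.nSliceHeight = video.nSliceHeight;
    params.bUsingNativeBuffers = usingNativeBuffers;
    err = OMX_GetParameter(hComponent, index, &params);
    if (err == OMX_ErrorNone) {
        *outImage = params.sMediaImage;   // component-corrected layout description
    }
    return err;
}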

// A pointer to this struct is passed to OMX_SetParameter or OMX_GetParameter
// when the extension index for the
// 'OMX.google.android.index.configureVideoTunnelMode' extension is given.
// If the extension is supported then tunneled playback mode should be supported
// by the codec. If bTunneled is set to OMX_TRUE then the video decoder should
// operate in "tunneled" mode and output its decoded frames directly to the
// sink. In this case nAudioHwSync is the HW SYNC ID of the audio HAL output
// stream to sync the video with. If bTunneled is set to OMX_FALSE, "tunneled"
// mode should be disabled and nAudioHwSync should be ignored.
// OMX_GetParameter is used to query the tunneling configuration. bTunneled should
// return whether the decoder is operating in tunneled mode, and if it is,
// pSidebandWindow should contain the codec-allocated sideband window handle.
struct ConfigureVideoTunnelModeParams {
    OMX_U32 nSize;              // IN
    OMX_VERSIONTYPE nVersion;   // IN
    OMX_U32 nPortIndex;         // IN
    OMX_BOOL bTunneled;         // IN/OUT
    OMX_U32 nAudioHwSync;       // IN
    OMX_PTR pSidebandWindow;    // OUT
};
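
// Illustrative sketch only, not part of this API: putting a decoder into tunneled mode
// against an audio HAL HW sync source, then reading back the sideband window handle the
// codec allocated.  'hComponent', 'portIndex' and 'audioHwSync' are assumed caller inputs.
static inline OMX_ERRORTYPE ExampleConfigureVideoTunnelMode(
        OMX_HANDLETYPE hComponent, OMX_U32 portIndex, OMX_U32 audioHwSync,
        OMX_PTR *outSidebandWindow) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            hComponent,
            const_cast<OMX_STRING>("OMX.google.android.index.configureVideoTunnelMode"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }
    ConfigureVideoTunnelModeParams params;
    params.nSize = sizeof(params);
    params.nVersion.nVersion = 0;
    params.nVersion.s.nVersionMajor = 1;   // OMX IL spec version 1.x assumed
    params.nPortIndex = portIndex;
    params.bTunneled = OMX_TRUE;
    params.nAudioHwSync = audioHwSync;
    params.pSidebandWindow = NULL;
    err = OMX_SetParameter(hComponent, index, &params);
    if (err == OMX_ErrorNone) {
        // Query the configuration back; the component fills in pSidebandWindow.
        err = OMX_GetParameter(hComponent, index, &params);
    }
    if (err == OMX_ErrorNone) {
        *outSidebandWindow = params.pSidebandWindow;
    }
    return err;
}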

}  // namespace android

extern android::OMXPluginBase *createOMXPlugin();

#endif  // HARDWARE_API_H_