OMXFD.cpp revision 967f1197f015b6d2f3b34e60fa787f7066efb824
/*
 * Copyright (C) Texas Instruments - http://www.ti.com/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file OMXFD.cpp
 *
 * This file contains functionality for handling face detection.
 *
 */

#undef LOG_TAG

#define LOG_TAG "CameraHAL"

#include "CameraHal.h"
#include "OMXCameraAdapter.h"

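// Minimum confidence score accepted from the hardware face detector.
// As noted in encodeFaceCoordinates(), real faces are observed with
// scores >= 80 while false positives can score around 70, so results
// at or below this threshold are filtered out.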
#define FACE_DETECTION_THRESHOLD 80

namespace android {

status_t OMXCameraAdapter::setParametersFD(const CameraParameters &params,
                                           BaseCameraAdapter::AdapterState state)
{
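    // No face-detection specific parameters are handled here at present;
    // presumably the hook exists so face detection follows the same
    // setParameters*() flow as the other adapter features.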
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}

status_t OMXCameraAdapter::startFaceDetection()
{
    status_t ret = NO_ERROR;

    Mutex::Autolock lock(mFaceDetectionLock);

    ret = setFaceDetection(true, mDeviceOrientation);
    if (ret != NO_ERROR) {
        goto out;
    }

    // Set 3A modes to face priority
    ret = setExposureMode(mParameters3A);
    if (ret != NO_ERROR) {
        goto out;
    }

    // Note: White balance will not be face prioritized, since
    // the algorithm needs full frame statistics, and not face
    // regions alone.

 out:
    return ret;
}

status_t OMXCameraAdapter::stopFaceDetection()
{
    status_t ret = NO_ERROR;
    BaseCameraAdapter::AdapterState state;
    BaseCameraAdapter::getState(state);

    Mutex::Autolock lock(mFaceDetectionLock);

    ret = setFaceDetection(false, mDeviceOrientation);
    if (ret != NO_ERROR) {
        goto out;
    }

    // Reset 3A settings
    ret = setParameters3A(mParams, state);
    if (ret != NO_ERROR) {
        goto out;
    }

    if (mPending3Asettings) {
        apply3Asettings(mParameters3A);
    }

 out:
    return ret;
}

void OMXCameraAdapter::pauseFaceDetection(bool pause)
{
    Mutex::Autolock lock(mFaceDetectionLock);
    // Pausing will only take effect if face detection is already running
    if (mFaceDetectionRunning) {
        mFaceDetectionPaused = pause;
    }
}

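/**
 * Enables or disables face detection on the preview port. Two OMX
 * configurations are applied: OMX_IndexConfigImageFaceDetection toggles the
 * detection algorithm itself, and OMX_IndexConfigOtherExtraDataControl
 * requests the results as OMX_FaceDetection extra data attached to the
 * preview buffers. mFaceDetectionRunning/mFaceDetectionPaused are updated
 * on success.
 */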
status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_EXTRADATATYPE extraDataControl;
    OMX_CONFIG_OBJDETECTIONTYPE objDetection;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
        {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -EINVAL;
        }

    if ( NO_ERROR == ret )
        {
        if ( orientation > 270 ) {
            orientation = 0;
        }

        OMX_INIT_STRUCT_PTR (&objDetection, OMX_CONFIG_OBJDETECTIONTYPE);
        objDetection.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
        objDetection.nDeviceOrientation = orientation;
        if ( enable )
            {
            objDetection.bEnable = OMX_TRUE;
            }
        else
            {
            objDetection.bEnable = OMX_FALSE;
            }

        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                ( OMX_INDEXTYPE ) OMX_IndexConfigImageFaceDetection,
                                &objDetection);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while configuring face detection 0x%x", eError);
            ret = -1;
            }
        else
            {
            CAMHAL_LOGDA("Face detection configured successfully");
            }
        }

    if ( NO_ERROR == ret )
        {
        OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
        extraDataControl.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
        extraDataControl.eExtraDataType = OMX_FaceDetection;
        extraDataControl.eCameraView = OMX_2D;
        if ( enable )
            {
            extraDataControl.bEnable = OMX_TRUE;
            }
        else
            {
            extraDataControl.bEnable = OMX_FALSE;
            }

        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
                                &extraDataControl);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while configuring face detection extra data 0x%x",
                         eError);
            ret = -1;
            }
        else
            {
            CAMHAL_LOGDA("Face detection extra data configured successfully");
            }
        }

    if ( NO_ERROR == ret )
        {
        mFaceDetectionRunning = enable;
        mFaceDetectionPaused = !enable;
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}

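/**
 * Extracts face detection results from a filled preview buffer. The OMX
 * component delivers the results through the buffer header's
 * pPlatformPrivate (OMX_TI_PLATFORMPRIVATE); its pMetaDataBuffer holds an
 * OMX_OTHER_EXTRADATATYPE block whose payload is an OMX_FACEDETECTIONTYPE.
 * The payload is converted to Android face metadata via
 * encodeFaceCoordinates() and returned wrapped in a CameraFDResult.
 */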
status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
                                       sp<CameraFDResult> &result,
                                       size_t previewWidth,
                                       size_t previewHeight)
{
    status_t ret = NO_ERROR;
    OMX_OTHER_EXTRADATATYPE *extraData;
    OMX_FACEDETECTIONTYPE *faceData;
    OMX_TI_PLATFORMPRIVATE *platformPrivate;
    camera_frame_metadata_t *faces;

    LOG_FUNCTION_NAME;

    if ( OMX_StateExecuting != mComponentState ) {
        CAMHAL_LOGEA("OMX component is not in executing state");
        return NO_INIT;
    }

    if ( NULL == pBuffHeader ) {
        CAMHAL_LOGEA("Invalid Buffer header");
        return -EINVAL;
    }

    platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate);
    if ( NULL != platformPrivate ) {
        if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
            CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize = %d, pMetaDataBuffer = 0x%x, nMetaDataSize = %d",
                         platformPrivate->nSize,
                         sizeof(OMX_TI_PLATFORMPRIVATE),
                         platformPrivate->pAuxBuf1,
                         platformPrivate->pAuxBufSize1,
                         platformPrivate->pMetaDataBuffer,
                         platformPrivate->nMetaDataSize);
        } else {
            CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
                         ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
                         ( unsigned int ) platformPrivate->nSize);
            ret = -EINVAL;
        }
    } else {
        CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
        return -EINVAL;
    }

    if ( 0 >= platformPrivate->nMetaDataSize ) {
        CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is %d",
                     ( unsigned int ) platformPrivate->nMetaDataSize);
        return -EINVAL;
    }

    extraData = (OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer);
    if ( NULL != extraData ) {
        CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize = %d, nPortIndex = 0x%x, nVersion = 0x%x",
                     extraData->nSize,
                     sizeof(OMX_OTHER_EXTRADATATYPE),
                     extraData->eType,
                     extraData->nDataSize,
                     extraData->nPortIndex,
                     extraData->nVersion);
    } else {
        CAMHAL_LOGEA("Invalid OMX_OTHER_EXTRADATATYPE");
        return -EINVAL;
    }

    faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
    if ( NULL != faceData ) {
        if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
            CAMHAL_LOGVB("Faces detected %d, nSize = %d, sizeof = %d, eCameraView = 0x%x, nPortIndex = 0x%x, nVersion = 0x%x",
                         faceData->ulFaceCount,
                         faceData->nSize,
                         sizeof(OMX_FACEDETECTIONTYPE),
                         faceData->eCameraView,
                         faceData->nPortIndex,
                         faceData->nVersion);
        } else {
            CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
                         ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
                         ( unsigned int ) faceData->nSize);
            return -EINVAL;
        }
    } else {
        CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
        return -EINVAL;
    }

    ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight);

    if ( NO_ERROR == ret ) {
        result = new CameraFDResult(faces);
    } else {
        result.clear();
        result = NULL;
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}

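/**
 * Converts the face positions reported by the OMX component (pixel
 * coordinates relative to the preview resolution) into Android
 * camera_face_t entries. Low-confidence detections are dropped using
 * FACE_DETECTION_THRESHOLD, and the rectangle corners are swapped and
 * mirrored when the device is oriented 180 degrees relative to the sensor.
 * The resulting camera_frame_metadata_t (and its faces array) is heap
 * allocated and handed back to the caller through pFaces.
 */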
status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
                                                 camera_frame_metadata_t **pFaces,
                                                 size_t previewWidth,
                                                 size_t previewHeight)
{
    status_t ret = NO_ERROR;
    camera_face_t *faces;
    camera_frame_metadata_t *faceResult;
    size_t hRange, vRange;
    double tmp;

    LOG_FUNCTION_NAME;

    if ( NULL == faceData ) {
        CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter");
        return -EINVAL;
    }

    hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT;
    vRange = CameraFDResult::BOTTOM - CameraFDResult::TOP;

    faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t));
    if ( NULL == faceResult ) {
        return -ENOMEM;
    }

    if ( 0 < faceData->ulFaceCount ) {
        int orient_mult;
        int trans_left, trans_top, trans_right, trans_bot;

        faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
        if ( NULL == faces ) {
            free(faceResult);
            return -ENOMEM;
        }

        /*
         * When the device is 180 degrees oriented to the sensor, we need to
         * translate the output from Ducati to what Android expects.
         * Ducati always gives face coordinates in this form, irrespective of
         * rotation, i.e. (l,t) always represents the point towards the left
         * eye and top of hair.
         * (l, t)
         *   ---------------
         *   -   ,,,,,,,   -
         *   -  |       |  -
         *   -  |<a   <a|  -
         *   - (|   ^   |) -
         *   -  |  -=-  |  -
         *   -   \_____/   -
         *   ---------------
         *               (r, b)
         *
         * However, Android expects the coordinates with respect to what the
         * sensor is viewing, i.e. Android expects the sensor to see this with
         * (l,t) and (r,b) like so:
         * (l, t)
         *   ---------------
         *   -    _____    -
         *   -   /     \   -
         *   -  |  -=-  |  -
         *   - (|   ^   |) -
         *   -  |a>   a>|  -
         *   -  |       |  -
         *   -   ,,,,,,,   -
         *   ---------------
         *               (r, b)
         */
        if (mDeviceOrientation == 180) {
            orient_mult = -1;
            trans_left = 2; // right is now left
            trans_top = 3; // bottom is now top
            trans_right = 0; // left is now right
            trans_bot = 1; // top is now bottom
        } else {
            orient_mult = 1;
            trans_left = 0; // left
            trans_top = 1; // top
            trans_right = 2; // right
            trans_bot = 3; // bottom
        }
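
        /*
         * Each face rectangle is normalized from preview pixels into the
         * range spanned by CameraFDResult::LEFT..RIGHT / TOP..BOTTOM.
         * Worked example (assuming the usual Android face coordinate range
         * of [-1000, 1000], i.e. hRange = vRange = 2000, and a 640x480
         * preview): a face with nLeft = 160 and nWidth = 320 maps to
         *   left  = 160/640 * 2000 - 1000 = -500
         *   right = -500 + 320/640 * 2000 = 500
         * With mDeviceOrientation == 180 the same face lands mirrored, since
         * orient_mult negates the width/height offsets and the translated
         * indices write each value into the opposite corner slot.
         */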

        int j = 0, i = 0;
        for ( ; j < faceData->ulFaceCount ; j++)
            {
            // Face filtering:
            // For real faces, the h/w is seen to report a score >= 80, while
            // false detections can score around 70. To avoid passing false
            // positives to the application, results scoring at or below
            // FACE_DETECTION_THRESHOLD are skipped here.
            if ( faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD )
                continue;

            tmp = ( double ) faceData->tFacePosition[j].nLeft / ( double ) previewWidth;
            tmp *= hRange;
            tmp -= hRange/2;
            faces[i].rect[trans_left] = tmp;

            tmp = ( double ) faceData->tFacePosition[j].nTop / ( double ) previewHeight;
            tmp *= vRange;
            tmp -= vRange/2;
            faces[i].rect[trans_top] = tmp;

            tmp = ( double ) faceData->tFacePosition[j].nWidth / ( double ) previewWidth;
            tmp *= hRange;
            tmp *= orient_mult;
            faces[i].rect[trans_right] = faces[i].rect[trans_left] + tmp;

            tmp = ( double ) faceData->tFacePosition[j].nHeight / ( double ) previewHeight;
            tmp *= vRange;
            tmp *= orient_mult;
            faces[i].rect[trans_bot] = faces[i].rect[trans_top] + tmp;

            faces[i].score = faceData->tFacePosition[j].nScore;
            faces[i].id = 0;
            faces[i].left_eye[0] = CameraFDResult::INVALID_DATA;
            faces[i].left_eye[1] = CameraFDResult::INVALID_DATA;
            faces[i].right_eye[0] = CameraFDResult::INVALID_DATA;
            faces[i].right_eye[1] = CameraFDResult::INVALID_DATA;
            faces[i].mouth[0] = CameraFDResult::INVALID_DATA;
            faces[i].mouth[1] = CameraFDResult::INVALID_DATA;
            i++;
            }

        faceResult->number_of_faces = i;
        faceResult->faces = faces;

    } else {
        faceResult->number_of_faces = 0;
        faceResult->faces = NULL;
    }

    *pFaces = faceResult;

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}

} // namespace android