1/*
2 * Copyright (C) Texas Instruments - http://www.ti.com/
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/**
18* @file OMXFD.cpp
19*
20* This file contains functionality for handling face detection.
21*
22*/
23
24#undef LOG_TAG
25
26#define LOG_TAG "CameraHAL"
27
28#include "CameraHal.h"
29#include "OMXCameraAdapter.h"
30
// Minimum hardware confidence score for a detection to be reported.
// Real faces are observed to score >= 80; spurious detections can score
// as high as 70, so anything at or below this threshold is dropped.
#define FACE_DETECTION_THRESHOLD 80

// constants used for face smooth filtering
// Maximum movement of a face-rect center between consecutive frames
// (in the normalized Android face-coordinate space) for the face to be
// treated as the same face as in the previous output.
static const int HorizontalFilterThreshold = 40;
static const int VerticalFilterThreshold = 40;
// Maximum change in face-rect size between consecutive frames for the
// previous frame's rectangle to be reused verbatim (jitter suppression).
static const int HorizontalFaceSizeThreshold = 30;
static const int VerticalFaceSizeThreshold = 30;
38
39
40namespace android {
41
42status_t OMXCameraAdapter::setParametersFD(const CameraParameters &params,
43                                           BaseCameraAdapter::AdapterState state)
44{
45    status_t ret = NO_ERROR;
46
47    LOG_FUNCTION_NAME;
48
49    LOG_FUNCTION_NAME_EXIT;
50
51    return ret;
52}
53
54status_t OMXCameraAdapter::startFaceDetection()
55{
56    status_t ret = NO_ERROR;
57
58    Mutex::Autolock lock(mFaceDetectionLock);
59
60    ret = setFaceDetection(true, mDeviceOrientation);
61    if (ret != NO_ERROR) {
62        goto out;
63    }
64
65    if ( mFaceDetectionRunning )
66        {
67        //Disable region priority and enable face priority for AF
68        setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
69        setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
70
71        //Disable Region priority and enable Face priority
72        setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
73        setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
74        }
75
76    // Note: White balance will not be face prioritized, since
77    // the algorithm needs full frame statistics, and not face
78    // regions alone.
79
80    faceDetectionNumFacesLastOutput = 0;
81 out:
82    return ret;
83}
84
85status_t OMXCameraAdapter::stopFaceDetection()
86{
87    status_t ret = NO_ERROR;
88    const char *str = NULL;
89    BaseCameraAdapter::AdapterState state;
90    BaseCameraAdapter::getState(state);
91
92    Mutex::Autolock lock(mFaceDetectionLock);
93
94    ret = setFaceDetection(false, mDeviceOrientation);
95    if (ret != NO_ERROR) {
96        goto out;
97    }
98
99    // Reset 3A settings
100    ret = setParameters3A(mParams, state);
101    if (ret != NO_ERROR) {
102        goto out;
103    }
104
105    if (mPending3Asettings) {
106        apply3Asettings(mParameters3A);
107    }
108
109    faceDetectionNumFacesLastOutput = 0;
110 out:
111    return ret;
112}
113
114void OMXCameraAdapter::pauseFaceDetection(bool pause)
115{
116    Mutex::Autolock lock(mFaceDetectionLock);
117    // pausing will only take affect if fd is already running
118    if (mFaceDetectionRunning) {
119        mFaceDetectionPaused = pause;
120        faceDetectionNumFacesLastOutput = 0;
121    }
122}
123
124status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
125{
126    status_t ret = NO_ERROR;
127    OMX_ERRORTYPE eError = OMX_ErrorNone;
128    OMX_CONFIG_EXTRADATATYPE extraDataControl;
129    OMX_CONFIG_OBJDETECTIONTYPE objDetection;
130
131    LOG_FUNCTION_NAME;
132
133    if ( OMX_StateInvalid == mComponentState )
134        {
135        CAMHAL_LOGEA("OMX component is in invalid state");
136        ret = -EINVAL;
137        }
138
139    if ( NO_ERROR == ret )
140        {
141        if ( orientation > 270 ) {
142            orientation = 0;
143        }
144
145        OMX_INIT_STRUCT_PTR (&objDetection, OMX_CONFIG_OBJDETECTIONTYPE);
146        objDetection.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
147        objDetection.nDeviceOrientation = orientation;
148        if  ( enable )
149            {
150            objDetection.bEnable = OMX_TRUE;
151            }
152        else
153            {
154            objDetection.bEnable = OMX_FALSE;
155            }
156
157        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
158                                ( OMX_INDEXTYPE ) OMX_IndexConfigImageFaceDetection,
159                                &objDetection);
160        if ( OMX_ErrorNone != eError )
161            {
162            CAMHAL_LOGEB("Error while configuring face detection 0x%x", eError);
163            ret = -1;
164            }
165        else
166            {
167            CAMHAL_LOGDA("Face detection configured successfully");
168            }
169        }
170
171    if ( NO_ERROR == ret )
172        {
173        OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
174        extraDataControl.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
175        extraDataControl.eExtraDataType = OMX_FaceDetection;
176        extraDataControl.eCameraView = OMX_2D;
177        if  ( enable )
178            {
179            extraDataControl.bEnable = OMX_TRUE;
180            }
181        else
182            {
183            extraDataControl.bEnable = OMX_FALSE;
184            }
185
186        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
187                                ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
188                                &extraDataControl);
189        if ( OMX_ErrorNone != eError )
190            {
191            CAMHAL_LOGEB("Error while configuring face detection extra data 0x%x",
192                         eError);
193            ret = -1;
194            }
195        else
196            {
197            CAMHAL_LOGDA("Face detection extra data configured successfully");
198            }
199        }
200
201    if ( NO_ERROR == ret )
202        {
203        mFaceDetectionRunning = enable;
204        mFaceDetectionPaused = !enable;
205        }
206
207    LOG_FUNCTION_NAME_EXIT;
208
209    return ret;
210}
211
212status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
213                                       sp<CameraFDResult> &result,
214                                       size_t previewWidth,
215                                       size_t previewHeight)
216{
217    status_t ret = NO_ERROR;
218    OMX_ERRORTYPE eError = OMX_ErrorNone;
219    OMX_TI_FACERESULT *faceResult;
220    OMX_OTHER_EXTRADATATYPE *extraData;
221    OMX_FACEDETECTIONTYPE *faceData;
222    OMX_TI_PLATFORMPRIVATE *platformPrivate;
223    camera_frame_metadata_t *faces;
224
225    LOG_FUNCTION_NAME;
226
227    if ( OMX_StateExecuting != mComponentState ) {
228        CAMHAL_LOGEA("OMX component is not in executing state");
229        return NO_INIT;
230    }
231
232    if ( NULL == pBuffHeader ) {
233        CAMHAL_LOGEA("Invalid Buffer header");
234        return-EINVAL;
235    }
236
237    platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate);
238    if ( NULL != platformPrivate ) {
239        if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
240            CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
241                         platformPrivate->nSize,
242                         sizeof(OMX_TI_PLATFORMPRIVATE),
243                         platformPrivate->pAuxBuf1,
244                         platformPrivate->pAuxBufSize1,
245                         platformPrivate->pMetaDataBuffer,
246                         platformPrivate->nMetaDataSize);
247        } else {
248            CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
249                         ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
250                         ( unsigned int ) platformPrivate->nSize);
251            ret = -EINVAL;
252        }
253    }  else {
254        CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
255        return-EINVAL;
256    }
257
258
259    if ( 0 >= platformPrivate->nMetaDataSize ) {
260        CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
261                     ( unsigned int ) platformPrivate->nMetaDataSize);
262        return -EINVAL;
263    }
264
265    extraData = getExtradata((OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer),
266            (OMX_EXTRADATATYPE)OMX_FaceDetection);
267
268    if ( NULL != extraData ) {
269        CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
270                     extraData->nSize,
271                     sizeof(OMX_OTHER_EXTRADATATYPE),
272                     extraData->eType,
273                     extraData->nDataSize,
274                     extraData->nPortIndex,
275                     extraData->nVersion);
276    } else {
277        CAMHAL_LOGEA("Invalid OMX_OTHER_EXTRADATATYPE");
278        return -EINVAL;
279    }
280
281    faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
282    if ( NULL != faceData ) {
283        if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
284            CAMHAL_LOGVB("Faces detected %d",
285                         faceData->ulFaceCount,
286                         faceData->nSize,
287                         sizeof(OMX_FACEDETECTIONTYPE),
288                         faceData->eCameraView,
289                         faceData->nPortIndex,
290                         faceData->nVersion);
291        } else {
292            CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
293                         ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
294                         ( unsigned int ) faceData->nSize);
295            return -EINVAL;
296        }
297    } else {
298        CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
299        return -EINVAL;
300    }
301
302    ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight);
303
304    if ( NO_ERROR == ret ) {
305        result = new CameraFDResult(faces);
306    } else {
307        result.clear();
308        result = NULL;
309    }
310
311    LOG_FUNCTION_NAME_EXIT;
312
313    return ret;
314}
315
316status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
317                                                 camera_frame_metadata_t **pFaces,
318                                                 size_t previewWidth,
319                                                 size_t previewHeight)
320{
321    status_t ret = NO_ERROR;
322    camera_face_t *faces;
323    camera_frame_metadata_t *faceResult;
324    size_t hRange, vRange;
325    double tmp;
326
327    LOG_FUNCTION_NAME;
328
329    if ( NULL == faceData ) {
330        CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter");
331        return EINVAL;
332    }
333
334    LOG_FUNCTION_NAME
335
336    hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT;
337    vRange = CameraFDResult::BOTTOM - CameraFDResult::TOP;
338
339    faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t));
340    if ( NULL == faceResult ) {
341        return -ENOMEM;
342    }
343
344    if ( 0 < faceData->ulFaceCount ) {
345        int orient_mult;
346        int trans_left, trans_top, trans_right, trans_bot;
347
348        faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
349        if ( NULL == faces ) {
350            return -ENOMEM;
351        }
352
353        /**
354        / * When device is 180 degrees oriented to the sensor, need to translate
355        / * the output from Ducati to what Android expects
356        / * Ducati always gives face coordinates in this form, irrespective of
357        / * rotation, i.e (l,t) always represents the point towards the left eye
358        / * and top of hair.
359        / * (l, t)
360        / *   ---------------
361        / *   -   ,,,,,,,   -
362        / *   -  |       |  -
363        / *   -  |<a   <a|  -
364        / *   - (|   ^   |) -
365        / *   -  |  -=-  |  -
366        / *   -   \_____/   -
367        / *   ---------------
368        / *               (r, b)
369        / *
370        / * However, Android expects the coords to be in respect with what the
371        / * sensor is viewing, i.e Android expects sensor to see this with (l,t)
372        / * and (r,b) like so:
373        / * (l, t)
374        / *   ---------------
375        / *   -    _____    -
376        / *   -   /     \   -
377        / *   -  |  -=-  |  -
378        / *   - (|   ^   |) -
379        / *   -  |a>   a>|  -
380        / *   -  |       |  -
381        / *   -   ,,,,,,,   -
382        / *   ---------------
383        / *               (r, b)
384          */
385
386        if (mDeviceOrientation == 180) {
387            orient_mult = -1;
388            trans_left = 2; // right is now left
389            trans_top = 3; // bottom is now top
390            trans_right = 0; // left is now right
391            trans_bot = 1; // top is not bottom
392        } else {
393            orient_mult = 1;
394            trans_left = 0; // left
395            trans_top = 1; // top
396            trans_right = 2; // right
397            trans_bot = 3; // bottom
398        }
399
400        int j = 0, i = 0;
401        for ( ; j < faceData->ulFaceCount ; j++)
402            {
403             OMX_S32 nLeft = 0;
404             OMX_S32 nTop = 0;
405             //Face filtering
406             //For real faces, it is seen that the h/w passes a score >=80
407             //For false faces, we seem to get even a score of 70 sometimes.
408             //In order to avoid any issue at application level, we filter
409             //<=70 score here.
410            if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD)
411             continue;
412
413            if (mDeviceOrientation == 180) {
414                // from sensor pov, the left pos is the right corner of the face in pov of frame
415                nLeft = faceData->tFacePosition[j].nLeft + faceData->tFacePosition[j].nWidth;
416                nTop =  faceData->tFacePosition[j].nTop + faceData->tFacePosition[j].nHeight;
417            } else {
418                nLeft = faceData->tFacePosition[j].nLeft;
419                nTop =  faceData->tFacePosition[j].nTop;
420            }
421
422            tmp = ( double ) nLeft / ( double ) previewWidth;
423            tmp *= hRange;
424            tmp -= hRange/2;
425            faces[i].rect[trans_left] = tmp;
426
427            tmp = ( double ) nTop / ( double )previewHeight;
428            tmp *= vRange;
429            tmp -= vRange/2;
430            faces[i].rect[trans_top] = tmp;
431
432            tmp = ( double ) faceData->tFacePosition[j].nWidth / ( double ) previewWidth;
433            tmp *= hRange;
434            tmp *= orient_mult;
435            faces[i].rect[trans_right] = faces[i].rect[trans_left] + tmp;
436
437            tmp = ( double ) faceData->tFacePosition[j].nHeight / ( double ) previewHeight;
438            tmp *= vRange;
439            tmp *= orient_mult;
440            faces[i].rect[trans_bot] = faces[i].rect[trans_top] + tmp;
441
442            faces[i].score = faceData->tFacePosition[j].nScore;
443            faces[i].id = 0;
444            faces[i].left_eye[0] = CameraFDResult::INVALID_DATA;
445            faces[i].left_eye[1] = CameraFDResult::INVALID_DATA;
446            faces[i].right_eye[0] = CameraFDResult::INVALID_DATA;
447            faces[i].right_eye[1] = CameraFDResult::INVALID_DATA;
448            faces[i].mouth[0] = CameraFDResult::INVALID_DATA;
449            faces[i].mouth[1] = CameraFDResult::INVALID_DATA;
450            i++;
451            }
452
453        faceResult->number_of_faces = i;
454        faceResult->faces = faces;
455
456        for (int i = 0; i  < faceResult->number_of_faces; i++)
457        {
458            int centerX = (faces[i].rect[trans_left] + faces[i].rect[trans_right] ) / 2;
459            int centerY = (faces[i].rect[trans_top] + faces[i].rect[trans_bot] ) / 2;
460
461            int sizeX = (faces[i].rect[trans_right] - faces[i].rect[trans_left] ) ;
462            int sizeY = (faces[i].rect[trans_bot] - faces[i].rect[trans_top] ) ;
463
464            for (int j = 0; j < faceDetectionNumFacesLastOutput; j++)
465            {
466                int tempCenterX = (faceDetectionLastOutput[j].rect[trans_left] +
467                                  faceDetectionLastOutput[j].rect[trans_right] ) / 2;
468                int tempCenterY = (faceDetectionLastOutput[j].rect[trans_top] +
469                                  faceDetectionLastOutput[j].rect[trans_bot] ) / 2;
470                int tempSizeX = (faceDetectionLastOutput[j].rect[trans_right] -
471                                faceDetectionLastOutput[j].rect[trans_left] ) ;
472                int tempSizeY = (faceDetectionLastOutput[j].rect[trans_bot] -
473                                faceDetectionLastOutput[j].rect[trans_top] ) ;
474
475                if ( (abs(tempCenterX - centerX) < HorizontalFilterThreshold) &&
476                     (abs(tempCenterY - centerY) < VerticalFilterThreshold) )
477                {
478                    // Found Face. It did not move too far.
479                    // Now check size of rectangle compare to last output
480                    if ( (abs (tempSizeX -sizeX) < HorizontalFaceSizeThreshold) &&
481                         (abs (tempSizeY -sizeY) < VerticalFaceSizeThreshold) )
482                    {
483                        // Rectangle is almost same as last time
484                        // Output exactly what was done for this face last time.
485                        faces[i] = faceDetectionLastOutput[j];
486                    }
487                    else
488                    {
489                        // TODO(XXX): Rectangle size changed but position is same.
490                        // Possibly we can apply just positional correctness.
491                    }
492                }
493            }
494        }
495
496        // Save this output for next iteration
497        for (int i = 0; i  < faceResult->number_of_faces; i++)
498        {
499            faceDetectionLastOutput[i] = faces[i];
500        }
501        faceDetectionNumFacesLastOutput = faceResult->number_of_faces;
502    } else {
503        faceResult->number_of_faces = 0;
504        faceResult->faces = NULL;
505    }
506
507    *pFaces = faceResult;
508
509    LOG_FUNCTION_NAME_EXIT;
510
511    return ret;
512}
513
514};
515