// QCamera3HWI.cpp revision 8b39484cf5acf6465baa90b3fbf8a411507630d1
1/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34#include <cutils/properties.h>
35#include <hardware/camera3.h>
36#include <camera/CameraMetadata.h>
37#include <stdlib.h>
38#include <fcntl.h>
39#include <stdint.h>
40#include <utils/Log.h>
41#include <utils/Errors.h>
42#include <ui/Fence.h>
43#include <gralloc_priv.h>
44#include "QCamera3HWI.h"
45#include "QCamera3Mem.h"
46#include "QCamera3Channel.h"
47#include "QCamera3PostProc.h"
48#include "QCamera3VendorTags.h"
49
50using namespace android;
51
52namespace qcamera {
53
54#define MAX(a, b) ((a) > (b) ? (a) : (b))
55
56#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
57
58#define EMPTY_PIPELINE_DELAY 2
59
60cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
61const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
62
63pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
64    PTHREAD_MUTEX_INITIALIZER;
65unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
66
67const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
68    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
69    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
70    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
71    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
72    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
73    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
74    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
75    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
76    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
77};
78
79const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
80    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
81    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
82    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
83    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
84    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
85    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
86    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
87    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
88    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
89};
90
91const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
92    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
93    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
94    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
95    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
96    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
97    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
98    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
99    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
100    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
101    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
102    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
103    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
104    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
105    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
106    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
107    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
108};
109
110const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
111    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
112    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
113    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
114    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
115    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
116    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
117    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
118};
119
120const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
121    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
122    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
123    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
124    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
125};
126
127const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
128    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
129    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
130    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
131    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
132    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
133};
134
135const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
136    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
137    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
138    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
139};
140
141const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
142    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
143    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
144};
145
146const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
147    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
148      CAM_FOCUS_UNCALIBRATED },
149    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
150      CAM_FOCUS_APPROXIMATE },
151    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
152      CAM_FOCUS_CALIBRATED }
153};
154
155const int32_t available_thumbnail_sizes[] = {0, 0,
156                                             176, 144,
157                                             320, 240,
158                                             432, 288,
159                                             480, 288,
160                                             512, 288,
161                                             512, 384};
162
163const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
164    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
165    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
166    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
167    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
168    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
169};
170
/* Since there is no mapping for every option, some Android enums are not
 * listed.  The order of entries matters: when mapping from HAL to Android
 * values the lookup traverses from lower to higher index, so for HAL values
 * that map to several Android values the first match wins.
 */
176const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
177    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
178    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
179    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
180    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
181    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
182    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
183    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
184    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
185    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
186    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
187    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
188    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
189    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
190    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
191    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
192    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
193};
194
195/* Custom tag definitions */
196
197camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
198    initialize:                         QCamera3HardwareInterface::initialize,
199    configure_streams:                  QCamera3HardwareInterface::configure_streams,
200    register_stream_buffers:            NULL,
201    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
202    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
203    get_metadata_vendor_tag_ops:        NULL,
204    dump:                               QCamera3HardwareInterface::dump,
205    flush:                              QCamera3HardwareInterface::flush,
206    reserved:                           {0},
207};
208
209int QCamera3HardwareInterface::kMaxInFlight = 5;
210
211/*===========================================================================
212 * FUNCTION   : QCamera3HardwareInterface
213 *
214 * DESCRIPTION: constructor of QCamera3HardwareInterface
215 *
216 * PARAMETERS :
217 *   @cameraId  : camera ID
218 *
219 * RETURN     : none
220 *==========================================================================*/
221QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
222                        const camera_module_callbacks_t *callbacks)
223    : mCameraId(cameraId),
224      mCameraHandle(NULL),
225      mCameraOpened(false),
226      mCameraInitialized(false),
227      mCallbackOps(NULL),
228      mInputStream(NULL),
229      mMetadataChannel(NULL),
230      mPictureChannel(NULL),
231      mRawChannel(NULL),
232      mSupportChannel(NULL),
233      mFirstRequest(false),
234      mParamHeap(NULL),
235      mParameters(NULL),
236      mLoopBackResult(NULL),
237      mMinProcessedFrameDuration(0),
238      mMinJpegFrameDuration(0),
239      mMinRawFrameDuration(0),
240      m_pPowerModule(NULL),
241      mHdrHint(false),
242      mMetaFrameCount(0),
243      mCallbacks(callbacks)
244{
245    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
246    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
247    mCameraDevice.common.close = close_camera_device;
248    mCameraDevice.ops = &mCameraOps;
249    mCameraDevice.priv = this;
250    gCamCapability[cameraId]->version = CAM_HAL_V3;
251    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
252    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
253    gCamCapability[cameraId]->min_num_pp_bufs = 3;
254
255    pthread_cond_init(&mRequestCond, NULL);
256    mPendingRequest = 0;
257    mCurrentRequestId = -1;
258    pthread_mutex_init(&mMutex, NULL);
259
260    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
261        mDefaultMetadata[i] = NULL;
262
263#ifdef HAS_MULTIMEDIA_HINTS
264    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
265        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
266    }
267#endif
268}
269
270/*===========================================================================
271 * FUNCTION   : ~QCamera3HardwareInterface
272 *
273 * DESCRIPTION: destructor of QCamera3HardwareInterface
274 *
275 * PARAMETERS : none
276 *
277 * RETURN     : none
278 *==========================================================================*/
279QCamera3HardwareInterface::~QCamera3HardwareInterface()
280{
281    ALOGV("%s: E", __func__);
282    /* We need to stop all streams before deleting any stream */
283
284    // NOTE: 'camera3_stream_t *' objects are already freed at
285    //        this stage by the framework
286    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
287        it != mStreamInfo.end(); it++) {
288        QCamera3Channel *channel = (*it)->channel;
289        if (channel) {
290            channel->stop();
291        }
292    }
293    if (mSupportChannel)
294        mSupportChannel->stop();
295
296    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
297        it != mStreamInfo.end(); it++) {
298        QCamera3Channel *channel = (*it)->channel;
299        if (channel)
300            delete channel;
301        free (*it);
302    }
303    if (mSupportChannel) {
304        delete mSupportChannel;
305        mSupportChannel = NULL;
306    }
307
308    mPictureChannel = NULL;
309
310    /* Clean up all channels */
311    if (mCameraInitialized) {
312        if (mMetadataChannel) {
313            mMetadataChannel->stop();
314            delete mMetadataChannel;
315            mMetadataChannel = NULL;
316        }
317        deinitParameters();
318    }
319
320    if (mCameraOpened)
321        closeCamera();
322
323    mPendingBuffersMap.mPendingBufferList.clear();
324    mPendingRequestsList.clear();
325
326    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
327        if (mDefaultMetadata[i])
328            free_camera_metadata(mDefaultMetadata[i]);
329
330    pthread_cond_destroy(&mRequestCond);
331
332    pthread_mutex_destroy(&mMutex);
333    ALOGV("%s: X", __func__);
334}
335
336/*===========================================================================
337 * FUNCTION   : openCamera
338 *
339 * DESCRIPTION: open camera
340 *
341 * PARAMETERS :
342 *   @hw_device  : double ptr for camera device struct
343 *
344 * RETURN     : int32_t type of status
345 *              NO_ERROR  -- success
346 *              none-zero failure code
347 *==========================================================================*/
348int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
349{
350    int rc = 0;
351    pthread_mutex_lock(&mCameraSessionLock);
352    if (mCameraSessionActive) {
353        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
354        pthread_mutex_unlock(&mCameraSessionLock);
355        return -EUSERS;
356    }
357
358    if (mCameraOpened) {
359        *hw_device = NULL;
360        return PERMISSION_DENIED;
361    }
362
363    rc = openCamera();
364    if (rc == 0) {
365        *hw_device = &mCameraDevice.common;
366        mCameraSessionActive = 1;
367    } else
368        *hw_device = NULL;
369
370#ifdef HAS_MULTIMEDIA_HINTS
371    if (rc == 0) {
372        if (m_pPowerModule) {
373            if (m_pPowerModule->powerHint) {
374                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
375                        (void *)"state=1");
376            }
377        }
378    }
379#endif
380    pthread_mutex_unlock(&mCameraSessionLock);
381    return rc;
382}
383
384/*===========================================================================
385 * FUNCTION   : openCamera
386 *
387 * DESCRIPTION: open camera
388 *
389 * PARAMETERS : none
390 *
391 * RETURN     : int32_t type of status
392 *              NO_ERROR  -- success
393 *              none-zero failure code
394 *==========================================================================*/
395int QCamera3HardwareInterface::openCamera()
396{
397    if (mCameraHandle) {
398        ALOGE("Failure: Camera already opened");
399        return ALREADY_EXISTS;
400    }
401    mCameraHandle = camera_open(mCameraId);
402    if (!mCameraHandle) {
403        ALOGE("camera_open failed.");
404        return UNKNOWN_ERROR;
405    }
406
407    mCameraOpened = true;
408
409    return NO_ERROR;
410}
411
412/*===========================================================================
413 * FUNCTION   : closeCamera
414 *
415 * DESCRIPTION: close camera
416 *
417 * PARAMETERS : none
418 *
419 * RETURN     : int32_t type of status
420 *              NO_ERROR  -- success
421 *              none-zero failure code
422 *==========================================================================*/
423int QCamera3HardwareInterface::closeCamera()
424{
425    int rc = NO_ERROR;
426
427    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
428    mCameraHandle = NULL;
429    mCameraOpened = false;
430
431#ifdef HAS_MULTIMEDIA_HINTS
432    if (rc == NO_ERROR) {
433        if (m_pPowerModule) {
434            if (m_pPowerModule->powerHint) {
435                if(mHdrHint == true) {
436                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
437                            (void *)"state=3");
438                    mHdrHint = false;
439                }
440                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
441                        (void *)"state=0");
442            }
443        }
444    }
445#endif
446
447    return rc;
448}
449
450/*===========================================================================
451 * FUNCTION   : initialize
452 *
453 * DESCRIPTION: Initialize frameworks callback functions
454 *
455 * PARAMETERS :
456 *   @callback_ops : callback function to frameworks
457 *
458 * RETURN     :
459 *
460 *==========================================================================*/
461int QCamera3HardwareInterface::initialize(
462        const struct camera3_callback_ops *callback_ops)
463{
464    int rc;
465
466    pthread_mutex_lock(&mMutex);
467
468    rc = initParameters();
469    if (rc < 0) {
470        ALOGE("%s: initParamters failed %d", __func__, rc);
471       goto err1;
472    }
473    mCallbackOps = callback_ops;
474
475    pthread_mutex_unlock(&mMutex);
476    mCameraInitialized = true;
477    return 0;
478
479err1:
480    pthread_mutex_unlock(&mMutex);
481    return rc;
482}
483
484/*===========================================================================
485 * FUNCTION   : configureStreams
486 *
487 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
488 *              and output streams.
489 *
490 * PARAMETERS :
491 *   @stream_list : streams to be configured
492 *
493 * RETURN     :
494 *
495 *==========================================================================*/
496int QCamera3HardwareInterface::configureStreams(
497        camera3_stream_configuration_t *streamList)
498{
499    int rc = 0;
500
501    // Sanity check stream_list
502    if (streamList == NULL) {
503        ALOGE("%s: NULL stream configuration", __func__);
504        return BAD_VALUE;
505    }
506    if (streamList->streams == NULL) {
507        ALOGE("%s: NULL stream list", __func__);
508        return BAD_VALUE;
509    }
510
511    if (streamList->num_streams < 1) {
512        ALOGE("%s: Bad number of streams requested: %d", __func__,
513                streamList->num_streams);
514        return BAD_VALUE;
515    }
516
517    /* first invalidate all the steams in the mStreamList
518     * if they appear again, they will be validated */
519    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
520            it != mStreamInfo.end(); it++) {
521        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
522        channel->stop();
523        (*it)->status = INVALID;
524    }
525    if (mMetadataChannel) {
526        /* If content of mStreamInfo is not 0, there is metadata stream */
527        mMetadataChannel->stop();
528    }
529
530#ifdef HAS_MULTIMEDIA_HINTS
531    if(mHdrHint == true) {
532        if (m_pPowerModule) {
533            if (m_pPowerModule->powerHint) {
534                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
535                        (void *)"state=3");
536                mHdrHint = false;
537            }
538        }
539    }
540#endif
541
542    pthread_mutex_lock(&mMutex);
543
544    bool isZsl = false;
545    camera3_stream_t *inputStream = NULL;
546    camera3_stream_t *jpegStream = NULL;
547    cam_stream_size_info_t stream_config_info;
548
549    for (size_t i = 0; i < streamList->num_streams; i++) {
550        camera3_stream_t *newStream = streamList->streams[i];
551        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
552                __func__, newStream->stream_type, newStream->format,
553                 newStream->width, newStream->height);
554        //if the stream is in the mStreamList validate it
555        bool stream_exists = false;
556        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
557                it != mStreamInfo.end(); it++) {
558            if ((*it)->stream == newStream) {
559                QCamera3Channel *channel =
560                    (QCamera3Channel*)(*it)->stream->priv;
561                stream_exists = true;
562                delete channel;
563                (*it)->status = VALID;
564                (*it)->stream->priv = NULL;
565                (*it)->channel = NULL;
566            }
567        }
568        if (!stream_exists) {
569            //new stream
570            stream_info_t* stream_info;
571            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
572            stream_info->stream = newStream;
573            stream_info->status = VALID;
574            stream_info->channel = NULL;
575            mStreamInfo.push_back(stream_info);
576        }
577        if (newStream->stream_type == CAMERA3_STREAM_INPUT
578                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
579            if (inputStream != NULL) {
580                ALOGE("%s: Multiple input streams requested!", __func__);
581                pthread_mutex_unlock(&mMutex);
582                return BAD_VALUE;
583            }
584            inputStream = newStream;
585        }
586        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
587            jpegStream = newStream;
588        }
589    }
590    mInputStream = inputStream;
591
592    cleanAndSortStreamInfo();
593    if (mMetadataChannel) {
594        delete mMetadataChannel;
595        mMetadataChannel = NULL;
596    }
597
598    //Create metadata channel and initialize it
599    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
600                    mCameraHandle->ops, captureResultCb,
601                    &gCamCapability[mCameraId]->padding_info, this);
602    if (mMetadataChannel == NULL) {
603        ALOGE("%s: failed to allocate metadata channel", __func__);
604        rc = -ENOMEM;
605        pthread_mutex_unlock(&mMutex);
606        return rc;
607    }
608    rc = mMetadataChannel->initialize();
609    if (rc < 0) {
610        ALOGE("%s: metadata channel initialization failed", __func__);
611        delete mMetadataChannel;
612        mMetadataChannel = NULL;
613        pthread_mutex_unlock(&mMutex);
614        return rc;
615    }
616
617    /* Create dummy stream if there is one single raw stream */
618    if (streamList->num_streams == 1 &&
619            (streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
620            streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16)) {
621        mSupportChannel = new QCamera3SupportChannel(
622                mCameraHandle->camera_handle,
623                mCameraHandle->ops,
624                &gCamCapability[mCameraId]->padding_info,
625                this);
626        if (!mSupportChannel) {
627            ALOGE("%s: dummy channel cannot be created", __func__);
628            pthread_mutex_unlock(&mMutex);
629            return -ENOMEM;
630        }
631
632        rc = mSupportChannel->initialize();
633        if (rc < 0) {
634            ALOGE("%s: dummy channel initialization failed", __func__);
635            delete mSupportChannel;
636            mSupportChannel = NULL;
637            delete mMetadataChannel;
638            mMetadataChannel = NULL;
639            pthread_mutex_unlock(&mMutex);
640            return rc;
641        }
642    }
643
644    /* Allocate channel objects for the requested streams */
645    for (size_t i = 0; i < streamList->num_streams; i++) {
646        camera3_stream_t *newStream = streamList->streams[i];
647        uint32_t stream_usage = newStream->usage;
648        stream_config_info.stream_sizes[i].width = newStream->width;
649        stream_config_info.stream_sizes[i].height = newStream->height;
650        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
651            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
652            //for zsl stream the size is active array size
653            isZsl = true;
654            stream_config_info.stream_sizes[i].width =
655                    gCamCapability[mCameraId]->active_array_size.width;
656            stream_config_info.stream_sizes[i].height =
657                    gCamCapability[mCameraId]->active_array_size.height;
658            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
659        } else {
660           //for non zsl streams find out the format
661           switch (newStream->format) {
662           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
663              {
664                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
665                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
666                 } else {
667                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
668                 }
669              }
670              break;
671           case HAL_PIXEL_FORMAT_YCbCr_420_888:
672              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
673#ifdef HAS_MULTIMEDIA_HINTS
674              if (m_pPowerModule) {
675                  if (m_pPowerModule->powerHint) {
676                      m_pPowerModule->powerHint(m_pPowerModule,
677                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
678                      mHdrHint = true;
679                  }
680              }
681#endif
682              break;
683           case HAL_PIXEL_FORMAT_BLOB:
684              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
685              break;
686           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
687           case HAL_PIXEL_FORMAT_RAW16:
688              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
689              break;
690           default:
691              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
692              break;
693           }
694        }
695        if (newStream->priv == NULL) {
696            //New stream, construct channel
697            switch (newStream->stream_type) {
698            case CAMERA3_STREAM_INPUT:
699                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
700                break;
701            case CAMERA3_STREAM_BIDIRECTIONAL:
702                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
703                    GRALLOC_USAGE_HW_CAMERA_WRITE;
704                break;
705            case CAMERA3_STREAM_OUTPUT:
706                /* For video encoding stream, set read/write rarely
707                 * flag so that they may be set to un-cached */
708                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
709                    newStream->usage =
710                         (GRALLOC_USAGE_SW_READ_RARELY |
711                         GRALLOC_USAGE_SW_WRITE_RARELY |
712                         GRALLOC_USAGE_HW_CAMERA_WRITE);
713                else
714                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
715                break;
716            default:
717                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
718                break;
719            }
720
721            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
722                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
723                QCamera3Channel *channel = NULL;
724                switch (newStream->format) {
725                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
726                case HAL_PIXEL_FORMAT_YCbCr_420_888:
727                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
728                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
729                            mCameraHandle->ops, captureResultCb,
730                            &gCamCapability[mCameraId]->padding_info,
731                            this,
732                            newStream,
733                            (cam_stream_type_t) stream_config_info.type[i]);
734                    if (channel == NULL) {
735                        ALOGE("%s: allocation of channel failed", __func__);
736                        pthread_mutex_unlock(&mMutex);
737                        return -ENOMEM;
738                    }
739
740                    newStream->priv = channel;
741                    break;
742                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
743                case HAL_PIXEL_FORMAT_RAW16:
744                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
745                    mRawChannel = new QCamera3RawChannel(
746                            mCameraHandle->camera_handle,
747                            mCameraHandle->ops, captureResultCb,
748                            &gCamCapability[mCameraId]->padding_info,
749                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
750                    if (mRawChannel == NULL) {
751                        ALOGE("%s: allocation of raw channel failed", __func__);
752                        pthread_mutex_unlock(&mMutex);
753                        return -ENOMEM;
754                    }
755
756                    newStream->priv = (QCamera3Channel*)mRawChannel;
757                    break;
758                case HAL_PIXEL_FORMAT_BLOB:
759                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
760                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
761                            mCameraHandle->ops, captureResultCb,
762                            &gCamCapability[mCameraId]->padding_info, this, newStream);
763                    if (mPictureChannel == NULL) {
764                        ALOGE("%s: allocation of channel failed", __func__);
765                        pthread_mutex_unlock(&mMutex);
766                        return -ENOMEM;
767                    }
768                    newStream->priv = (QCamera3Channel*)mPictureChannel;
769                    break;
770
771                default:
772                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
773                    break;
774                }
775            }
776
777            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
778                    it != mStreamInfo.end(); it++) {
779                if ((*it)->stream == newStream) {
780                    (*it)->channel = (QCamera3Channel*) newStream->priv;
781                    break;
782                }
783            }
784        } else {
785            // Channel already exists for this stream
786            // Do nothing for now
787        }
788    }
789
790    if (isZsl)
791        mPictureChannel->overrideYuvSize(
792                gCamCapability[mCameraId]->active_array_size.width,
793                gCamCapability[mCameraId]->active_array_size.height);
794
795    int32_t hal_version = CAM_HAL_V3;
796    stream_config_info.num_streams = streamList->num_streams;
797    if (mSupportChannel) {
798        stream_config_info.stream_sizes[stream_config_info.num_streams] =
799                QCamera3SupportChannel::kDim;
800        stream_config_info.type[stream_config_info.num_streams] =
801                CAM_STREAM_TYPE_CALLBACK;
802        stream_config_info.num_streams++;
803    }
804
805    // settings/parameters don't carry over for new configureStreams
806    memset(mParameters, 0, sizeof(metadata_buffer_t));
807
808    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
809    AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
810                sizeof(hal_version), &hal_version);
811
812    AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
813                sizeof(stream_config_info), &stream_config_info);
814
815    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
816
817    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
818    mPendingRequestsList.clear();
819    mPendingFrameDropList.clear();
820    // Initialize/Reset the pending buffers list
821    mPendingBuffersMap.num_buffers = 0;
822    mPendingBuffersMap.mPendingBufferList.clear();
823
824    mFirstRequest = true;
825
826    //Get min frame duration for this streams configuration
827    deriveMinFrameDuration();
828
829    pthread_mutex_unlock(&mMutex);
830    return rc;
831}
832
833/*===========================================================================
834 * FUNCTION   : validateCaptureRequest
835 *
836 * DESCRIPTION: validate a capture request from camera service
837 *
838 * PARAMETERS :
839 *   @request : request from framework to process
840 *
841 * RETURN     :
842 *
843 *==========================================================================*/
844int QCamera3HardwareInterface::validateCaptureRequest(
845                    camera3_capture_request_t *request)
846{
847    ssize_t idx = 0;
848    const camera3_stream_buffer_t *b;
849    CameraMetadata meta;
850
851    /* Sanity check the request */
852    if (request == NULL) {
853        ALOGE("%s: NULL capture request", __func__);
854        return BAD_VALUE;
855    }
856
857    if (request->settings == NULL && mFirstRequest) {
858        /*settings cannot be null for the first request*/
859        return BAD_VALUE;
860    }
861
862    uint32_t frameNumber = request->frame_number;
863    if (request->input_buffer != NULL &&
864            request->input_buffer->stream != mInputStream) {
865        ALOGE("%s: Request %d: Input buffer not from input stream!",
866                __FUNCTION__, frameNumber);
867        return BAD_VALUE;
868    }
869    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
870        ALOGE("%s: Request %d: No output buffers provided!",
871                __FUNCTION__, frameNumber);
872        return BAD_VALUE;
873    }
874    if (request->input_buffer != NULL) {
875        b = request->input_buffer;
876        QCamera3Channel *channel =
877            static_cast<QCamera3Channel*>(b->stream->priv);
878        if (channel == NULL) {
879            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
880                    __func__, frameNumber, idx);
881            return BAD_VALUE;
882        }
883        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
884            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
885                    __func__, frameNumber, idx);
886            return BAD_VALUE;
887        }
888        if (b->release_fence != -1) {
889            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
890                    __func__, frameNumber, idx);
891            return BAD_VALUE;
892        }
893        if (b->buffer == NULL) {
894            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
895                    __func__, frameNumber, idx);
896            return BAD_VALUE;
897        }
898    }
899
900    // Validate all buffers
901    b = request->output_buffers;
902    do {
903        QCamera3Channel *channel =
904                static_cast<QCamera3Channel*>(b->stream->priv);
905        if (channel == NULL) {
906            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
907                    __func__, frameNumber, idx);
908            return BAD_VALUE;
909        }
910        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
911            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
912                    __func__, frameNumber, idx);
913            return BAD_VALUE;
914        }
915        if (b->release_fence != -1) {
916            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
917                    __func__, frameNumber, idx);
918            return BAD_VALUE;
919        }
920        if (b->buffer == NULL) {
921            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
922                    __func__, frameNumber, idx);
923            return BAD_VALUE;
924        }
925        idx++;
926        b = request->output_buffers + idx;
927    } while (idx < (ssize_t)request->num_output_buffers);
928
929    return NO_ERROR;
930}
931
932/*===========================================================================
933 * FUNCTION   : deriveMinFrameDuration
934 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
936 *              on currently configured streams.
937 *
938 * PARAMETERS : NONE
939 *
940 * RETURN     : NONE
941 *
942 *==========================================================================*/
943void QCamera3HardwareInterface::deriveMinFrameDuration()
944{
945    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
946
947    maxJpegDim = 0;
948    maxProcessedDim = 0;
949    maxRawDim = 0;
950
951    // Figure out maximum jpeg, processed, and raw dimensions
952    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
953        it != mStreamInfo.end(); it++) {
954
955        // Input stream doesn't have valid stream_type
956        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
957            continue;
958
959        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
960        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
961            if (dimension > maxJpegDim)
962                maxJpegDim = dimension;
963        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
964                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
965            if (dimension > maxRawDim)
966                maxRawDim = dimension;
967        } else {
968            if (dimension > maxProcessedDim)
969                maxProcessedDim = dimension;
970        }
971    }
972
973    //Assume all jpeg dimensions are in processed dimensions.
974    if (maxJpegDim > maxProcessedDim)
975        maxProcessedDim = maxJpegDim;
976    //Find the smallest raw dimension that is greater or equal to jpeg dimension
977    if (maxProcessedDim > maxRawDim) {
978        maxRawDim = INT32_MAX;
979        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
980            i++) {
981
982            int32_t dimension =
983                gCamCapability[mCameraId]->raw_dim[i].width *
984                gCamCapability[mCameraId]->raw_dim[i].height;
985
986            if (dimension >= maxProcessedDim && dimension < maxRawDim)
987                maxRawDim = dimension;
988        }
989    }
990
991    //Find minimum durations for processed, jpeg, and raw
992    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
993            i++) {
994        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
995                gCamCapability[mCameraId]->raw_dim[i].height) {
996            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
997            break;
998        }
999    }
1000    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1001        if (maxProcessedDim ==
1002            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1003            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1004            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1005            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1006            break;
1007        }
1008    }
1009}
1010
1011/*===========================================================================
1012 * FUNCTION   : getMinFrameDuration
1013 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
1015 *              and current request configuration.
1016 *
 * PARAMETERS : @request: request sent by the frameworks
1018 *
 * RETURN     : min frame duration for a particular request
1020 *
1021 *==========================================================================*/
1022int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1023{
1024    bool hasJpegStream = false;
1025    bool hasRawStream = false;
1026    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1027        const camera3_stream_t *stream = request->output_buffers[i].stream;
1028        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1029            hasJpegStream = true;
1030        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1031                stream->format == HAL_PIXEL_FORMAT_RAW16)
1032            hasRawStream = true;
1033    }
1034
1035    if (!hasJpegStream)
1036        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1037    else
1038        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1039}
1040
1041/*===========================================================================
1042 * FUNCTION   : handleMetadataWithLock
1043 *
1044 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1045 *
1046 * PARAMETERS : @metadata_buf: metadata buffer
1047 *
1048 * RETURN     :
1049 *
1050 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf)
{
    // Unpack the fields this handler consumes from the HAL metadata buffer.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
        CAM_INTF_META_PENDING_REQUESTS, metadata);
    uint32_t frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
    const struct timeval *tv = (const struct timeval *)
        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    // Sensor timestamp converted from timeval to nanoseconds.
    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
        tv->tv_usec * NSEC_PER_USEC;
    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);

    int32_t urgent_frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t urgent_frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if (urgent_frame_number_valid) {
        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using HAL3.1 quirk for partial results
        for (List<PendingRequestInfo>::iterator i =
            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Older requests that were never notified get a shutter with an
            // estimated timestamp, stepped back ~33ms per missed frame.
            if (i->frame_number < urgent_frame_number &&
                i->bNotified == 0) {
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                i->timestamp = notify_msg.message.shutter.timestamp;
                i->bNotified = 1;
                ALOGV("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            }

            // The urgent frame itself: shutter notify plus a partial result
            // carrying the 3A metadata, with no output buffers.
            if (i->frame_number == urgent_frame_number) {

                camera3_capture_result_t result;

                // Send shutter notify to frameworks
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time;
                mCallbackOps->notify(mCallbackOps, &notify_msg);

                i->timestamp = capture_time;
                i->bNotified = 1;

                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A metadata buffer without a valid frame number is only a start-of-frame
    // marker: return the buffer to the channel and skip result dispatch.
    if (!frame_number_valid) {
        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        goto done_metadata;
    }
    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Go through the pending requests info and send shutter/results to frameworks
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
        // buffer with CAMERA3_BUFFER_STATUS_ERROR
        if (cam_frame_drop.frame_dropped) {
            camera3_notify_msg_t notify_msg;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      notify_msg.type = CAMERA3_MSG_ERROR;
                      notify_msg.message.error.frame_number = i->frame_number;
                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                      notify_msg.message.error.error_stream = j->stream;
                      mCallbackOps->notify(mCallbackOps, &notify_msg);
                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      PendingFrameDropInfo PendingFrameDrop;
                      PendingFrameDrop.frame_number=i->frame_number;
                      PendingFrameDrop.stream_ID = streamID;
                      // Add the Frame drop info to mPendingFrameDropList
                      // so the buffer path can mark it STATUS_ERROR later.
                      mPendingFrameDropList.push_back(PendingFrameDrop);
                  }
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            // Dropped metadata: synthesize a minimal result with only the
            // (estimated) timestamp and request id.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            // Current frame: translate the full HAL metadata.
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }

                //If it is a blob request then send the metadata to the picture channel
                // (picture channel takes ownership of the copy).
                metadata_buffer_t *reproc_meta =
                        (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
                if (reproc_meta == NULL) {
                    // NOTE(review): this early exit skips bufDone/free of
                    // metadata_buf and leaks result.result — confirm intended.
                    ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
                    goto done_metadata;
                }
                *reproc_meta = *metadata;
                mPictureChannel->queueReprocMetadata(reproc_meta);
            }
            // Return metadata buffer
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers already cached for this request by the buffer path.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                // NOTE(review): plain new[] throws rather than returning NULL,
                // so this check is dead; execution would continue regardless.
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer STATUS_ERROR if it was recorded as a
                    // dropped frame for this stream/frame pair.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the global pending-buffers map.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        ALOGV("%s: Found buffer %p in pending buffer List "
                              "for frame %d, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    // Transfer ownership of the cached buffer into the result array.
                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // No buffers cached yet: deliver a metadata-only result.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = mPendingRequestsList.erase(i);
    }

done_metadata:
    // Every request still pending has aged by one stage in the pipeline.
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    if (!pending_requests)
        unblockRequestIfNecessary();

}
1299
1300/*===========================================================================
1301 * FUNCTION   : handleBufferWithLock
1302 *
1303 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1304 *
1305 * PARAMETERS : @buffer: image buffer for the callback
1306 *              @frame_number: frame number of the image buffer
1307 *
1308 * RETURN     :
1309 *
1310 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Frame number not pending: its metadata was already delivered.
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Build a buffer-only result (no metadata) for immediate delivery.
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // If this stream/frame pair was flagged as dropped, mark the buffer
        // STATUS_ERROR and consume the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Remove the buffer from the global pending-buffers map.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                ALOGV("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer_present) {
            // Reprocess request: deliver the buffer right away and retire
            // the pending request entry.
            camera3_capture_result result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            i = mPendingRequestsList.erase(i);
            mPendingRequest--;
        } else {
            // Metadata not delivered yet: cache a copy of the buffer on the
            // matching stream slot so handleMetadataWithLock can send it
            // together with the metadata result.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        // NOTE(review): malloc result is not NULL-checked
                        // before the copy below — confirm acceptable here.
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
1394
1395/*===========================================================================
1396 * FUNCTION   : unblockRequestIfNecessary
1397 *
1398 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1399 *              that mMutex is held when this function is called.
1400 *
1401 * PARAMETERS :
1402 *
1403 * RETURN     :
1404 *
1405 *==========================================================================*/
1406void QCamera3HardwareInterface::unblockRequestIfNecessary()
1407{
1408   // Unblock process_capture_request
1409   pthread_cond_signal(&mRequestCond);
1410}
1411
1412/*===========================================================================
1413 * FUNCTION   : registerStreamBuffers
1414 *
1415 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1416 *
1417 * PARAMETERS :
1418 *   @stream_list : streams to be configured
1419 *
1420 * RETURN     :
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::registerStreamBuffers(
1424        const camera3_stream_buffer_set_t * /*buffer_set*/)
1425{
1426    //Deprecated
1427    return NO_ERROR;
1428}
1429
1430/*===========================================================================
1431 * FUNCTION   : processCaptureRequest
1432 *
1433 * DESCRIPTION: process a capture request from camera service
1434 *
1435 * PARAMETERS :
1436 *   @request : request from framework to process
1437 *
1438 * RETURN     :
1439 *
1440 *==========================================================================*/
1441int QCamera3HardwareInterface::processCaptureRequest(
1442                    camera3_capture_request_t *request)
1443{
1444    int rc = NO_ERROR;
1445    int32_t request_id;
1446    CameraMetadata meta;
1447
1448    pthread_mutex_lock(&mMutex);
1449
1450    rc = validateCaptureRequest(request);
1451    if (rc != NO_ERROR) {
1452        ALOGE("%s: incoming request is not valid", __func__);
1453        pthread_mutex_unlock(&mMutex);
1454        return rc;
1455    }
1456
1457    meta = request->settings;
1458
1459    // For first capture request, send capture intent, and
1460    // stream on all streams
1461    if (mFirstRequest) {
1462
1463        for (size_t i = 0; i < request->num_output_buffers; i++) {
1464            const camera3_stream_buffer_t& output = request->output_buffers[i];
1465            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1466            rc = channel->registerBuffer(output.buffer);
1467            if (rc < 0) {
1468                ALOGE("%s: registerBuffer failed",
1469                        __func__);
1470                pthread_mutex_unlock(&mMutex);
1471                return -ENODEV;
1472            }
1473        }
1474
1475        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1476            int32_t hal_version = CAM_HAL_V3;
1477            uint8_t captureIntent =
1478                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1479
1480            memset(mParameters, 0, sizeof(metadata_buffer_t));
1481            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1482            AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1483                sizeof(hal_version), &hal_version);
1484            AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1485                sizeof(captureIntent), &captureIntent);
1486            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1487                mParameters);
1488        }
1489
1490        ALOGD("%s: Start META Channel", __func__);
1491        mMetadataChannel->start();
1492
1493        if (mSupportChannel)
1494            mSupportChannel->start();
1495
1496        //First initialize all streams
1497        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1498            it != mStreamInfo.end(); it++) {
1499            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1500            rc = channel->initialize();
1501            if (NO_ERROR != rc) {
1502                ALOGE("%s : Channel initialization failed %d", __func__, rc);
1503                if (mSupportChannel)
1504                    mSupportChannel->stop();
1505                mMetadataChannel->stop();
1506                pthread_mutex_unlock(&mMutex);
1507                return rc;
1508            }
1509        }
1510        //Then start them.
1511        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1512            it != mStreamInfo.end(); it++) {
1513            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1514            ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
1515            channel->start();
1516        }
1517    }
1518
1519    uint32_t frameNumber = request->frame_number;
1520    cam_stream_ID_t streamID;
1521
1522    if (meta.exists(ANDROID_REQUEST_ID)) {
1523        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1524        mCurrentRequestId = request_id;
1525        ALOGV("%s: Received request with id: %d",__func__, request_id);
1526    } else if (mFirstRequest || mCurrentRequestId == -1){
1527        ALOGE("%s: Unable to find request id field, \
1528                & no previous id available", __func__);
1529        return NAME_NOT_FOUND;
1530    } else {
1531        ALOGV("%s: Re-using old request id", __func__);
1532        request_id = mCurrentRequestId;
1533    }
1534
1535    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1536                                    __func__, __LINE__,
1537                                    request->num_output_buffers,
1538                                    request->input_buffer,
1539                                    frameNumber);
1540    // Acquire all request buffers first
1541    streamID.num_streams = 0;
1542    int blob_request = 0;
1543    for (size_t i = 0; i < request->num_output_buffers; i++) {
1544        const camera3_stream_buffer_t& output = request->output_buffers[i];
1545        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1546        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1547
1548        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1549            //Call function to store local copy of jpeg data for encode params.
1550            blob_request = 1;
1551        }
1552
1553        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1554        if (rc != OK) {
1555            ALOGE("%s: fence wait failed %d", __func__, rc);
1556            pthread_mutex_unlock(&mMutex);
1557            return rc;
1558        }
1559
1560        streamID.streamID[streamID.num_streams] =
1561            channel->getStreamID(channel->getStreamTypeMask());
1562        streamID.num_streams++;
1563    }
1564
1565    if(request->input_buffer == NULL) {
1566       rc = setFrameParameters(request, streamID);
1567        if (rc < 0) {
1568            ALOGE("%s: fail to set frame parameters", __func__);
1569            pthread_mutex_unlock(&mMutex);
1570            return rc;
1571        }
1572    }
1573
1574    /* Update pending request list and pending buffers map */
1575    PendingRequestInfo pendingRequest;
1576    pendingRequest.frame_number = frameNumber;
1577    pendingRequest.num_buffers = request->num_output_buffers;
1578    pendingRequest.request_id = request_id;
1579    pendingRequest.blob_request = blob_request;
1580    pendingRequest.bNotified = 0;
1581    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1582    pendingRequest.pipeline_depth = 0;
1583    extractJpegMetadata(pendingRequest.jpegMetadata, request);
1584
1585    for (size_t i = 0; i < request->num_output_buffers; i++) {
1586        RequestedBufferInfo requestedBuf;
1587        requestedBuf.stream = request->output_buffers[i].stream;
1588        requestedBuf.buffer = NULL;
1589        pendingRequest.buffers.push_back(requestedBuf);
1590
1591        // Add to buffer handle the pending buffers list
1592        PendingBufferInfo bufferInfo;
1593        bufferInfo.frame_number = frameNumber;
1594        bufferInfo.buffer = request->output_buffers[i].buffer;
1595        bufferInfo.stream = request->output_buffers[i].stream;
1596        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1597        mPendingBuffersMap.num_buffers++;
1598        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1599          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1600          bufferInfo.stream->format);
1601    }
1602    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1603          __func__, mPendingBuffersMap.num_buffers);
1604
1605    mPendingRequestsList.push_back(pendingRequest);
1606
1607    // Notify metadata channel we receive a request
1608    mMetadataChannel->request(NULL, frameNumber);
1609
1610    // Call request on other streams
1611    for (size_t i = 0; i < request->num_output_buffers; i++) {
1612        const camera3_stream_buffer_t& output = request->output_buffers[i];
1613        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1614        mm_camera_buf_def_t *pInputBuffer = NULL;
1615
1616        if (channel == NULL) {
1617            ALOGE("%s: invalid channel pointer for stream", __func__);
1618            continue;
1619        }
1620
1621        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1622            QCamera3RegularChannel* inputChannel = NULL;
1623            if(request->input_buffer != NULL){
1624
1625                //Try to get the internal format
1626                inputChannel = (QCamera3RegularChannel*)
1627                    request->input_buffer->stream->priv;
1628                if(inputChannel == NULL ){
1629                    ALOGE("%s: failed to get input channel handle", __func__);
1630                } else {
1631                    pInputBuffer =
1632                        inputChannel->getInternalFormatBuffer(
1633                                request->input_buffer->buffer);
1634                    ALOGD("%s: Input buffer dump",__func__);
1635                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1636                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1637                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1638                    ALOGD("Handle:%p", request->input_buffer->buffer);
1639                }
1640                rc = channel->request(output.buffer, frameNumber,
1641                            pInputBuffer, mParameters);
1642                if (rc < 0) {
1643                    ALOGE("%s: Fail to request on picture channel", __func__);
1644                    pthread_mutex_unlock(&mMutex);
1645                    return rc;
1646                }
1647
1648                rc = setReprocParameters(request);
1649                if (rc < 0) {
1650                    ALOGE("%s: fail to set reproc parameters", __func__);
1651                    pthread_mutex_unlock(&mMutex);
1652                    return rc;
1653                }
1654            } else
1655                rc = channel->request(output.buffer, frameNumber,
1656                            NULL, mParameters);
1657        } else {
1658            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1659                __LINE__, output.buffer, frameNumber);
1660           rc = channel->request(output.buffer, frameNumber);
1661        }
1662        if (rc < 0)
1663            ALOGE("%s: request failed", __func__);
1664    }
1665
1666    mFirstRequest = false;
1667    // Added a timed condition wait
1668    struct timespec ts;
1669    uint8_t isValidTimeout = 1;
1670    rc = clock_gettime(CLOCK_REALTIME, &ts);
1671    if (rc < 0) {
1672        isValidTimeout = 0;
1673        ALOGE("%s: Error reading the real time clock!!", __func__);
1674    }
1675    else {
1676        // Make timeout as 5 sec for request to be honored
1677        ts.tv_sec += 5;
1678    }
1679    //Block on conditional variable
1680
1681    mPendingRequest++;
1682    while (mPendingRequest >= kMaxInFlight) {
1683        if (!isValidTimeout) {
1684            ALOGV("%s: Blocking on conditional wait", __func__);
1685            pthread_cond_wait(&mRequestCond, &mMutex);
1686        }
1687        else {
1688            ALOGV("%s: Blocking on timed conditional wait", __func__);
1689            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1690            if (rc == ETIMEDOUT) {
1691                rc = -ENODEV;
1692                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1693                break;
1694            }
1695        }
1696        ALOGV("%s: Unblocked", __func__);
1697    }
1698    pthread_mutex_unlock(&mMutex);
1699
1700    return rc;
1701}
1702
1703/*===========================================================================
1704 * FUNCTION   : dump
1705 *
1706 * DESCRIPTION:
1707 *
1708 * PARAMETERS :
1709 *
1710 *
1711 * RETURN     :
1712 *==========================================================================*/
1713void QCamera3HardwareInterface::dump(int /*fd*/)
1714{
1715    /*Enable lock when we implement this function*/
1716    /*
1717    pthread_mutex_lock(&mMutex);
1718
1719    pthread_mutex_unlock(&mMutex);
1720    */
1721    return;
1722}
1723
1724/*===========================================================================
1725 * FUNCTION   : flush
1726 *
1727 * DESCRIPTION:
1728 *
1729 * PARAMETERS :
1730 *
1731 *
1732 * RETURN     :
1733 *==========================================================================*/
1734int QCamera3HardwareInterface::flush()
1735{
1736
1737    unsigned int frameNum = 0;
1738    camera3_notify_msg_t notify_msg;
1739    camera3_capture_result_t result;
1740    camera3_stream_buffer_t pStream_Buf;
1741
1742    ALOGV("%s: Unblocking Process Capture Request", __func__);
1743
1744    // Stop the Streams/Channels
1745    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1746        it != mStreamInfo.end(); it++) {
1747        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1748        channel->stop();
1749        (*it)->status = INVALID;
1750    }
1751
1752    if (mSupportChannel) {
1753        mSupportChannel->stop();
1754    }
1755    if (mMetadataChannel) {
1756        /* If content of mStreamInfo is not 0, there is metadata stream */
1757        mMetadataChannel->stop();
1758    }
1759
1760    // Mutex Lock
1761    pthread_mutex_lock(&mMutex);
1762
1763    // Unblock process_capture_request
1764    mPendingRequest = 0;
1765    pthread_cond_signal(&mRequestCond);
1766
1767    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1768    frameNum = i->frame_number;
1769    ALOGV("%s: Latest frame num on  mPendingRequestsList = %d",
1770      __func__, frameNum);
1771
1772    // Go through the pending buffers and send buffer errors
1773    for (List<PendingBufferInfo>::iterator k =
1774         mPendingBuffersMap.mPendingBufferList.begin();
1775         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
1776         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1777          __func__, k->frame_number, k->buffer, k->stream,
1778          k->stream->format);
1779
1780        if (k->frame_number < frameNum) {
1781            // Send Error notify to frameworks for each buffer for which
1782            // metadata buffer is already sent
1783            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
1784              __func__, k->frame_number, k->buffer);
1785
1786            notify_msg.type = CAMERA3_MSG_ERROR;
1787            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1788            notify_msg.message.error.error_stream = k->stream;
1789            notify_msg.message.error.frame_number = k->frame_number;
1790            mCallbackOps->notify(mCallbackOps, &notify_msg);
1791            ALOGV("%s: notify frame_number = %d", __func__,
1792                    i->frame_number);
1793
1794            pStream_Buf.acquire_fence = -1;
1795            pStream_Buf.release_fence = -1;
1796            pStream_Buf.buffer = k->buffer;
1797            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1798            pStream_Buf.stream = k->stream;
1799
1800            result.result = NULL;
1801            result.frame_number = k->frame_number;
1802            result.num_output_buffers = 1;
1803            result.output_buffers = &pStream_Buf ;
1804            mCallbackOps->process_capture_result(mCallbackOps, &result);
1805
1806            mPendingBuffersMap.num_buffers--;
1807            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1808        }
1809        else {
1810          k++;
1811        }
1812    }
1813
1814    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1815
1816    // Go through the pending requests info and send error request to framework
1817    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
1818        int numBuffers = 0;
1819        ALOGV("%s:Sending ERROR REQUEST for frame %d",
1820              __func__, i->frame_number);
1821
1822        // Send shutter notify to frameworks
1823        notify_msg.type = CAMERA3_MSG_ERROR;
1824        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
1825        notify_msg.message.error.error_stream = NULL;
1826        notify_msg.message.error.frame_number = i->frame_number;
1827        mCallbackOps->notify(mCallbackOps, &notify_msg);
1828
1829        result.frame_number = i->frame_number;
1830        result.num_output_buffers = 0;
1831        result.output_buffers = NULL;
1832        numBuffers = 0;
1833
1834        for (List<PendingBufferInfo>::iterator k =
1835             mPendingBuffersMap.mPendingBufferList.begin();
1836             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
1837          if (k->frame_number == i->frame_number) {
1838            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
1839                   " stream = %p, stream format = %d",__func__,
1840                   k->frame_number, k->buffer, k->stream, k->stream->format);
1841
1842            pStream_Buf.acquire_fence = -1;
1843            pStream_Buf.release_fence = -1;
1844            pStream_Buf.buffer = k->buffer;
1845            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1846            pStream_Buf.stream = k->stream;
1847
1848            result.num_output_buffers = 1;
1849            result.output_buffers = &pStream_Buf;
1850            result.result = NULL;
1851            result.frame_number = i->frame_number;
1852
1853            mCallbackOps->process_capture_result(mCallbackOps, &result);
1854            mPendingBuffersMap.num_buffers--;
1855            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1856            numBuffers++;
1857          }
1858          else {
1859            k++;
1860          }
1861        }
1862        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1863              __func__, mPendingBuffersMap.num_buffers);
1864
1865        i = mPendingRequestsList.erase(i);
1866    }
1867
1868    /* Reset pending buffer list and requests list */
1869    mPendingRequestsList.clear();
1870    /* Reset pending frame Drop list and requests list */
1871    mPendingFrameDropList.clear();
1872
1873    mPendingBuffersMap.num_buffers = 0;
1874    mPendingBuffersMap.mPendingBufferList.clear();
1875    ALOGV("%s: Cleared all the pending buffers ", __func__);
1876
1877    mFirstRequest = true;
1878    pthread_mutex_unlock(&mMutex);
1879    return 0;
1880}
1881
1882/*===========================================================================
1883 * FUNCTION   : captureResultCb
1884 *
1885 * DESCRIPTION: Callback handler for all capture result
1886 *              (streams, as well as metadata)
1887 *
1888 * PARAMETERS :
1889 *   @metadata : metadata information
1890 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1891 *               NULL if metadata.
1892 *
1893 * RETURN     : NONE
1894 *==========================================================================*/
1895void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1896                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1897{
1898    pthread_mutex_lock(&mMutex);
1899
1900    /* Assume flush() is called before any reprocessing. Send
1901     * notify and result immediately upon receipt of any callback*/
1902    if (mLoopBackResult) {
1903        /* Send notify */
1904        camera3_notify_msg_t notify_msg;
1905        notify_msg.type = CAMERA3_MSG_SHUTTER;
1906        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
1907        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
1908        mCallbackOps->notify(mCallbackOps, &notify_msg);
1909
1910        /* Send capture result */
1911        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
1912        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
1913        free(mLoopBackResult);
1914        mLoopBackResult = NULL;
1915    }
1916
1917    if (metadata_buf)
1918        handleMetadataWithLock(metadata_buf);
1919    else
1920        handleBufferWithLock(buffer, frame_number);
1921
1922    pthread_mutex_unlock(&mMutex);
1923    return;
1924}
1925
1926/*===========================================================================
1927 * FUNCTION   : translateFromHalMetadata
1928 *
1929 * DESCRIPTION:
1930 *
1931 * PARAMETERS :
1932 *   @metadata : metadata information from callback
1933 *
1934 * RETURN     : camera_metadata_t*
1935 *              metadata in a format specified by fwk
1936 *==========================================================================*/
1937camera_metadata_t*
1938QCamera3HardwareInterface::translateFromHalMetadata(
1939                                 metadata_buffer_t *metadata,
1940                                 nsecs_t timestamp,
1941                                 int32_t request_id,
1942                                 const CameraMetadata& jpegMetadata,
1943                                 uint8_t pipeline_depth)
1944{
1945    CameraMetadata camMetadata;
1946    camera_metadata_t* resultMetadata;
1947
1948    if (jpegMetadata.entryCount())
1949        camMetadata.append(jpegMetadata);
1950
1951    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1952    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1953    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
1954
1955    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1956    uint8_t next_entry;
1957    while (curr_entry != CAM_INTF_PARM_MAX) {
1958       switch (curr_entry) {
1959         case CAM_INTF_META_FRAME_NUMBER:{
1960             int64_t frame_number = *(uint32_t *) POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1961             camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
1962             break;
1963         }
1964         case CAM_INTF_META_FACE_DETECTION:{
1965             cam_face_detection_data_t *faceDetectionInfo =
1966                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1967             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1968             int32_t faceIds[MAX_ROI];
1969             uint8_t faceScores[MAX_ROI];
1970             int32_t faceRectangles[MAX_ROI * 4];
1971             int32_t faceLandmarks[MAX_ROI * 6];
1972             int j = 0, k = 0;
1973             for (int i = 0; i < numFaces; i++) {
1974                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1975                 faceScores[i] = faceDetectionInfo->faces[i].score;
1976                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1977                         faceRectangles+j, -1);
1978                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1979                 j+= 4;
1980                 k+= 6;
1981             }
1982
1983             if (numFaces <= 0) {
1984                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1985                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1986                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1987                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1988             }
1989
1990             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1991             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1992             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1993               faceRectangles, numFaces*4);
1994             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1995               faceLandmarks, numFaces*6);
1996
1997            break;
1998            }
1999         case CAM_INTF_META_COLOR_CORRECT_MODE:{
2000             uint8_t  *color_correct_mode =
2001                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2002             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2003             break;
2004          }
2005
2006         // 3A state is sent in urgent partial result (uses quirk)
2007         case CAM_INTF_META_AEC_PRECAPTURE_ID:
2008         case CAM_INTF_META_AEC_STATE:
2009         case CAM_INTF_PARM_AEC_LOCK:
2010         case CAM_INTF_PARM_EV:
2011         case CAM_INTF_PARM_FOCUS_MODE:
2012         case CAM_INTF_META_AF_STATE:
2013         case CAM_INTF_META_AF_TRIGGER_ID:
2014         case CAM_INTF_PARM_WHITE_BALANCE:
2015         case CAM_INTF_META_AWB_REGIONS:
2016         case CAM_INTF_META_AWB_STATE:
2017         case CAM_INTF_PARM_AWB_LOCK:
2018         case CAM_INTF_META_PRECAPTURE_TRIGGER:
2019         case CAM_INTF_META_AEC_MODE:
2020         case CAM_INTF_PARM_LED_MODE:
2021         case CAM_INTF_PARM_REDEYE_REDUCTION:
2022         case CAM_INTF_META_AF_TRIGGER_NOTICE: {
2023           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
2024           break;
2025         }
2026
2027          case CAM_INTF_META_MODE: {
2028             uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
2029             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
2030             break;
2031          }
2032
2033          case CAM_INTF_META_EDGE_MODE: {
2034             cam_edge_application_t  *edgeApplication =
2035                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
2036             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2037             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2038             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2039             break;
2040          }
2041          case CAM_INTF_META_FLASH_POWER: {
2042             uint8_t  *flashPower =
2043                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2044             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2045             break;
2046          }
2047          case CAM_INTF_META_FLASH_FIRING_TIME: {
2048             int64_t  *flashFiringTime =
2049                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2050             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2051             break;
2052          }
2053          case CAM_INTF_META_FLASH_STATE: {
2054             uint8_t  flashState =
2055                *((uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata));
2056             if (!gCamCapability[mCameraId]->flash_available) {
2057                 flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2058             }
2059             camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
2060             break;
2061          }
2062          case CAM_INTF_META_FLASH_MODE:{
2063             uint8_t flashMode = *((uint8_t*)
2064                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata));
2065             uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
2066                                          sizeof(FLASH_MODES_MAP),
2067                                          flashMode);
2068             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
2069             break;
2070          }
2071          case CAM_INTF_META_HOTPIXEL_MODE: {
2072              uint8_t  *hotPixelMode =
2073                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2074              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2075              break;
2076          }
2077          case CAM_INTF_META_LENS_APERTURE:{
2078             float  *lensAperture =
2079                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2080             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2081             break;
2082          }
2083          case CAM_INTF_META_LENS_FILTERDENSITY: {
2084             float  *filterDensity =
2085                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2086             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2087             break;
2088          }
2089          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2090             float  *focalLength =
2091                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2092             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2093             break;
2094          }
2095          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2096             float  *focusDistance =
2097                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2098             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2099             break;
2100          }
2101          case CAM_INTF_META_LENS_FOCUS_RANGE: {
2102             float  *focusRange =
2103                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2104             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2105             break;
2106          }
2107          case CAM_INTF_META_LENS_STATE: {
2108             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2109             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2110             break;
2111          }
2112          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2113             uint8_t  *opticalStab =
2114                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2115             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2116             break;
2117          }
2118          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2119             uint8_t  *noiseRedMode =
2120                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2121             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2122             break;
2123          }
2124          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2125             uint8_t  *noiseRedStrength =
2126                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2127             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2128             break;
2129          }
2130          case CAM_INTF_META_SCALER_CROP_REGION: {
2131             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2132             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2133             int32_t scalerCropRegion[4];
2134             scalerCropRegion[0] = hScalerCropRegion->left;
2135             scalerCropRegion[1] = hScalerCropRegion->top;
2136             scalerCropRegion[2] = hScalerCropRegion->width;
2137             scalerCropRegion[3] = hScalerCropRegion->height;
2138             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2139             break;
2140          }
2141          case CAM_INTF_META_AEC_ROI: {
2142            cam_area_t  *hAeRegions =
2143                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
2144            int32_t aeRegions[5];
2145            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
2146            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
2147            ALOGV("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
2148                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
2149                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width, hAeRegions->rect.height);
2150            break;
2151          }
2152          case CAM_INTF_META_AF_ROI:{
2153            /*af regions*/
2154            cam_area_t  *hAfRegions =
2155                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
2156            int32_t afRegions[5];
2157            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
2158            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
2159            ALOGV("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
2160                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
2161                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width, hAfRegions->rect.height);
2162            break;
2163          }
2164          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2165             int64_t  *sensorExpTime =
2166                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2167             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2168             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2169             break;
2170          }
2171          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2172             int64_t  *sensorFameDuration =
2173                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2174             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2175             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2176             break;
2177          }
2178          case CAM_INTF_META_SENSOR_SENSITIVITY:{
2179             int32_t  *sensorSensitivity =
2180                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2181             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
2182             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
2183             break;
2184          }
2185
2186          case CAM_INTF_META_SHADING_MODE: {
2187             uint8_t  *shadingMode =
2188                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2189             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2190             break;
2191          }
2192
2193          case CAM_INTF_META_LENS_SHADING_MAP_MODE: {
2194             uint8_t  *shadingMapMode =
2195                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata);
2196             camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, shadingMapMode, 1);
2197             break;
2198          }
2199
2200          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2201             uint8_t  *faceDetectMode =
2202                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2203             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2204                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2205                                                        *faceDetectMode);
2206             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2207             break;
2208          }
2209          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2210             uint8_t  *histogramMode =
2211                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2212             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2213             break;
2214          }
2215          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2216               uint8_t  *sharpnessMapMode =
2217                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2218               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2219                                  sharpnessMapMode, 1);
2220               break;
2221           }
2222          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2223               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2224               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2225               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2226                                  (int32_t*)sharpnessMap->sharpness,
2227                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2228               break;
2229          }
2230          case CAM_INTF_META_LENS_SHADING_MAP: {
2231               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2232               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2233               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2234               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2235               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2236                                  (float*)lensShadingMap->lens_shading,
2237                                  4*map_width*map_height);
2238               break;
2239          }
2240
2241          case CAM_INTF_META_TONEMAP_MODE: {
2242             uint8_t  *toneMapMode =
2243                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2244             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2245             break;
2246          }
2247
2248          case CAM_INTF_META_TONEMAP_CURVES:{
2249             //Populate CAM_INTF_META_TONEMAP_CURVES
2250             /* ch0 = G, ch 1 = B, ch 2 = R*/
2251             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2252             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2253             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2254                                (float*)tonemap->curves[0].tonemap_points,
2255                                tonemap->tonemap_points_cnt * 2);
2256
2257             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2258                                (float*)tonemap->curves[1].tonemap_points,
2259                                tonemap->tonemap_points_cnt * 2);
2260
2261             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2262                                (float*)tonemap->curves[2].tonemap_points,
2263                                tonemap->tonemap_points_cnt * 2);
2264             break;
2265          }
2266
2267          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2268             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2269             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2270             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2271             break;
2272          }
2273          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2274              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2275              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2276              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2277                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2278              break;
2279          }
2280
2281          /* DNG file realted metadata */
2282          case CAM_INTF_META_PROFILE_TONE_CURVE: {
2283             cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
2284             POINTER_OF(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
2285             camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
2286                                (float*)toneCurve->curve.tonemap_points,
2287                                toneCurve->tonemap_points_cnt * 2);
2288             break;
2289          }
2290
2291          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2292             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2293             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2294             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2295                       predColorCorrectionGains->gains, 4);
2296             break;
2297          }
2298          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2299             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2300                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2301             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2302                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2303             break;
2304
2305          }
2306
2307          case CAM_INTF_META_OTP_WB_GRGB:{
2308             float *otpWbGrGb = (float*) POINTER_OF(CAM_INTF_META_OTP_WB_GRGB, metadata);
2309             camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
2310             break;
2311          }
2312
2313          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2314             uint8_t *blackLevelLock = (uint8_t*)
2315               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2316             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2317             break;
2318          }
2319          case CAM_INTF_PARM_ANTIBANDING: {
2320            uint8_t *hal_ab_mode =
2321              (uint8_t *)POINTER_OF(CAM_INTF_PARM_ANTIBANDING, metadata);
2322            uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
2323                     sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2324                     *hal_ab_mode);
2325            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
2326                &fwk_ab_mode, 1);
2327            break;
2328          }
2329
2330          case CAM_INTF_META_CAPTURE_INTENT:{
2331             uint8_t *captureIntent = (uint8_t*)
2332               POINTER_OF(CAM_INTF_META_CAPTURE_INTENT, metadata);
2333             camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, captureIntent, 1);
2334             break;
2335          }
2336
2337          case CAM_INTF_META_SCENE_FLICKER:{
2338             uint8_t *sceneFlicker = (uint8_t*)
2339             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2340             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2341             break;
2342          }
2343          case CAM_INTF_PARM_EFFECT: {
2344             uint8_t *effectMode = (uint8_t*)
2345                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2346             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2347                                                    sizeof(EFFECT_MODES_MAP),
2348                                                    *effectMode);
2349             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2350             break;
2351          }
2352          case CAM_INTF_META_TEST_PATTERN_DATA: {
2353             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2354                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2355             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2356                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2357                     testPatternData->mode);
2358             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2359                     &fwk_testPatternMode, 1);
2360            int32_t fwk_testPatternData[4];
2361            fwk_testPatternData[0] = testPatternData->r;
2362            fwk_testPatternData[3] = testPatternData->b;
2363            switch (gCamCapability[mCameraId]->color_arrangement) {
2364            case CAM_FILTER_ARRANGEMENT_RGGB:
2365            case CAM_FILTER_ARRANGEMENT_GRBG:
2366                fwk_testPatternData[1] = testPatternData->gr;
2367                fwk_testPatternData[2] = testPatternData->gb;
2368                break;
2369            case CAM_FILTER_ARRANGEMENT_GBRG:
2370            case CAM_FILTER_ARRANGEMENT_BGGR:
2371                fwk_testPatternData[2] = testPatternData->gr;
2372                fwk_testPatternData[1] = testPatternData->gb;
2373                break;
2374            default:
2375                ALOGE("%s: color arrangement %d is not supported", __func__,
2376                    gCamCapability[mCameraId]->color_arrangement);
2377                break;
2378            }
2379            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
2380            break;
2381
2382          }
2383          case CAM_INTF_META_JPEG_GPS_COORDINATES: {
2384              double *gps_coords = (double *)POINTER_OF(
2385                      CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2386              camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
2387              break;
2388          }
2389          case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
2390              char *gps_methods = (char *)POINTER_OF(
2391                      CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2392              String8 str(gps_methods);
2393              camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2394              break;
2395          }
2396          case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
2397              int64_t *gps_timestamp = (int64_t *)POINTER_OF(
2398                      CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2399              camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
2400              break;
2401          }
2402          case CAM_INTF_META_JPEG_ORIENTATION: {
2403              int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
2404                      CAM_INTF_META_JPEG_ORIENTATION, metadata);
2405              camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
2406              break;
2407          }
2408          case CAM_INTF_META_JPEG_QUALITY: {
2409              uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
2410                      CAM_INTF_META_JPEG_QUALITY, metadata);
2411              camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
2412              break;
2413          }
2414          case CAM_INTF_META_JPEG_THUMB_QUALITY: {
2415              uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
2416                      CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2417              camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
2418              break;
2419          }
2420
2421          case CAM_INTF_META_JPEG_THUMB_SIZE: {
2422              cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
2423                      CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2424              camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
2425              break;
2426          }
2427
2428             break;
2429          case CAM_INTF_META_PRIVATE_DATA: {
2430             uint8_t *privateData = (uint8_t *)
2431                 POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
2432             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
2433                 privateData, MAX_METADATA_PAYLOAD_SIZE);
2434             break;
2435          }
2436
2437          case CAM_INTF_META_NEUTRAL_COL_POINT:{
2438             cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
2439                 POINTER_OF(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
2440             camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
2441                     (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
2442             break;
2443          }
2444
2445          default:
2446             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2447                   __func__, curr_entry);
2448             break;
2449       }
2450       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2451       curr_entry = next_entry;
2452    }
2453
2454    /* Constant metadata values to be update*/
2455    uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
2456    camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
2457
2458    uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
2459    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
2460
2461    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
2462    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
2463
2464    int32_t hotPixelMap[2];
2465    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2466
2467    resultMetadata = camMetadata.release();
2468    return resultMetadata;
2469}
2470
2471/*===========================================================================
2472 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2473 *
2474 * DESCRIPTION:
2475 *
2476 * PARAMETERS :
2477 *   @metadata : metadata information from callback
2478 *
2479 * RETURN     : camera_metadata_t*
2480 *              metadata in a format specified by fwk
2481 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Translate the "urgent" (3A: AE/AF/AWB) subset of HAL metadata into a
    // framework partial result. The caller owns the returned buffer
    // (camMetadata.release() transfers ownership). Entries not handled here
    // are reported by the regular metadata translation path.
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;
    // AE mode cannot be derived from a single HAL entry; these three pointers
    // are collected during the scan and combined after the loop (see below).
    uint8_t *aeMode = NULL;
    int32_t *flashMode = NULL;
    int32_t *redeye = NULL;

    // Mark this buffer as a partial result so the framework knows the rest of
    // the frame's metadata will arrive later.
    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);

    // Walk the HAL metadata buffer entry by entry; entries are chained via
    // GET_FIRST_PARAM_ID / GET_NEXT_PARAM_ID and payloads fetched with
    // POINTER_OF. Unknown tags fall through to the default case and are
    // ignored here.
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
            int32_t  *ae_precapture_id =
              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                                          ae_precapture_id, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
          break;
        }
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_AEC_LOCK: {
            uint8_t  *ae_lock =
              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AEC_LOCK, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_LOCK,
                                          ae_lock, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_LOCK", __func__);
            break;
        }
        case CAM_INTF_PARM_FPS_RANGE: {
            // HAL reports fps as floats; framework expects int32 — fractional
            // parts are truncated by the casts.
            int32_t fps_range[2];
            cam_fps_range_t * float_range =
              (cam_fps_range_t *)POINTER_OF(CAM_INTF_PARM_FPS_RANGE, metadata);
            fps_range[0] = (int32_t)float_range->min_fps;
            fps_range[1] = (int32_t)float_range->max_fps;
            camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                          fps_range, 2);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
                __func__, fps_range[0], fps_range[1]);
            break;
        }
        case CAM_INTF_PARM_EV: {
            int32_t  *expCompensation =
              (int32_t *)POINTER_OF(CAM_INTF_PARM_EV, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                                          expCompensation, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION",
                __func__);
            break;
        }
        case CAM_INTF_PARM_FOCUS_MODE:{
            // Map HAL focus mode enum to the framework AF mode enum.
            uint8_t  *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t  *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_ID: {
            int32_t  *afTriggerId =
                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
            break;
        }
        case CAM_INTF_PARM_WHITE_BALANCE: {
           uint8_t  *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             uint8_t fwkWhiteBalanceMode =
                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }
        case CAM_INTF_META_AWB_REGIONS: {
           /*awb regions: convert HAL rect+weight into the framework's
             5-int (xmin, ymin, xmax, ymax, weight) representation*/
           cam_area_t  *hAwbRegions =
               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
           int32_t awbRegions[5];
           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
           break;
        }


        case CAM_INTF_META_AWB_STATE: {
           uint8_t  *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }


        case CAM_INTF_PARM_AWB_LOCK: {
            uint8_t  *awb_lock =
              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AWB_LOCK, metadata);
            camMetadata.update(ANDROID_CONTROL_AWB_LOCK, awb_lock, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_LOCK", __func__);
            break;
        }
        case CAM_INTF_PARM_BESTSHOT_MODE: {
            uint8_t *sceneMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_BESTSHOT_MODE, metadata);
            uint8_t fwkSceneMode =
                (uint8_t)lookupFwkName(SCENE_MODES_MAP,
                sizeof(SCENE_MODES_MAP)/
                sizeof(SCENE_MODES_MAP[0]), *sceneMode);
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
                 &fwkSceneMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
            break;
        }
        case CAM_INTF_META_PRECAPTURE_TRIGGER: {
            uint8_t *precaptureTrigger =
                (uint8_t *)POINTER_OF(CAM_INTF_META_PRECAPTURE_TRIGGER, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                 precaptureTrigger, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER",
                __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_NOTICE: {
            uint8_t *af_trigger =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_NOTICE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                af_trigger, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER = %d",
                __func__, *af_trigger);
            break;
        }
        // The next three cases only record pointers; the combined AE mode is
        // deduced from them after the scan completes.
        case CAM_INTF_META_AEC_MODE:{
            aeMode = (uint8_t*)
            POINTER_OF(CAM_INTF_META_AEC_MODE, metadata);
            break;
        }
        case CAM_INTF_PARM_LED_MODE:{
            flashMode = (int32_t*)
            POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
            break;
        }
        case CAM_INTF_PARM_REDEYE_REDUCTION:{
            redeye = (int32_t*)
            POINTER_OF(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
            break;
        }
        default:
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
            break;
       }
       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
       curr_entry = next_entry;
    }

    // Deduce ANDROID_CONTROL_AE_MODE with the following priority:
    // redeye reduction enabled > flash auto/on > AE on > AE off.
    // If none of the source entries were present, no AE mode is reported.
    uint8_t fwk_aeMode;
    if (redeye != NULL && *redeye == 1) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (flashMode != NULL &&
            ((*flashMode == CAM_FLASH_MODE_AUTO)||
             (*flashMode == CAM_FLASH_MODE_ON))) {
        fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
                sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%p!!!",__func__, redeye, flashMode, aeMode);
    }

    // Transfer ownership of the assembled metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
2683
2684/*===========================================================================
2685 * FUNCTION   : dumpMetadataToFile
2686 *
2687 * DESCRIPTION: Dumps tuning metadata to file system
2688 *
2689 * PARAMETERS :
2690 *   @meta           : tuning metadata
2691 *   @dumpFrameCount : current dump frame count
2692 *   @enabled        : Enable mask
2693 *
2694 *==========================================================================*/
2695void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
2696                                                   uint32_t &dumpFrameCount,
2697                                                   int32_t enabled,
2698                                                   const char *type,
2699                                                   uint32_t frameNumber)
2700{
2701    uint32_t frm_num = 0;
2702
2703    //Some sanity checks
2704    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
2705        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
2706              __func__,
2707              meta.tuning_sensor_data_size,
2708              TUNING_SENSOR_DATA_MAX);
2709        return;
2710    }
2711
2712    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
2713        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
2714              __func__,
2715              meta.tuning_vfe_data_size,
2716              TUNING_VFE_DATA_MAX);
2717        return;
2718    }
2719
2720    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
2721        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
2722              __func__,
2723              meta.tuning_cpp_data_size,
2724              TUNING_CPP_DATA_MAX);
2725        return;
2726    }
2727
2728    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
2729        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
2730              __func__,
2731              meta.tuning_cac_data_size,
2732              TUNING_CAC_DATA_MAX);
2733        return;
2734    }
2735    //
2736
2737    if(enabled){
2738        frm_num = ((enabled & 0xffff0000) >> 16);
2739        if(frm_num == 0) {
2740            frm_num = 10; //default 10 frames
2741        }
2742        if(frm_num > 256) {
2743            frm_num = 256; //256 buffers cycle around
2744        }
2745        if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
2746            // reset frame count if cycling
2747            dumpFrameCount = 0;
2748        }
2749        ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
2750        if (dumpFrameCount < frm_num) {
2751            char timeBuf[FILENAME_MAX];
2752            char buf[FILENAME_MAX];
2753            memset(buf, 0, sizeof(buf));
2754            memset(timeBuf, 0, sizeof(timeBuf));
2755            time_t current_time;
2756            struct tm * timeinfo;
2757            time (&current_time);
2758            timeinfo = localtime (&current_time);
2759            strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
2760            String8 filePath(timeBuf);
2761            snprintf(buf,
2762                     sizeof(buf),
2763                     "%d_HAL_META_%s_%d.bin",
2764                     dumpFrameCount,
2765                     type,
2766                     frameNumber);
2767            filePath.append(buf);
2768            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2769            if (file_fd > 0) {
2770                int written_len = 0;
2771                meta.tuning_data_version = TUNING_DATA_VERSION;
2772                void *data = (void *)((uint8_t *)&meta.tuning_data_version);
2773                written_len += write(file_fd, data, sizeof(uint32_t));
2774                data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
2775                ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
2776                written_len += write(file_fd, data, sizeof(uint32_t));
2777                data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
2778                ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
2779                written_len += write(file_fd, data, sizeof(uint32_t));
2780                data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
2781                ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
2782                written_len += write(file_fd, data, sizeof(uint32_t));
2783                data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
2784                ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
2785                written_len += write(file_fd, data, sizeof(uint32_t));
2786                int total_size = meta.tuning_sensor_data_size;
2787                data = (void *)((uint8_t *)&meta.data);
2788                written_len += write(file_fd, data, total_size);
2789                total_size = meta.tuning_vfe_data_size;
2790                data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
2791                written_len += write(file_fd, data, total_size);
2792                total_size = meta.tuning_cpp_data_size;
2793                data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
2794                written_len += write(file_fd, data, total_size);
2795                total_size = meta.tuning_cac_data_size;
2796                data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
2797                written_len += write(file_fd, data, total_size);
2798                close(file_fd);
2799            }else {
2800                ALOGE("%s: fail t open file for image dumping", __func__);
2801            }
2802            dumpFrameCount++;
2803        }
2804    }
2805}
2806
2807/*===========================================================================
2808 * FUNCTION   : cleanAndSortStreamInfo
2809 *
2810 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
2811 *              and sort them such that raw stream is at the end of the list
2812 *              This is a workaround for camera daemon constraint.
2813 *
2814 * PARAMETERS : None
2815 *
2816 *==========================================================================*/
2817void QCamera3HardwareInterface::cleanAndSortStreamInfo()
2818{
2819    List<stream_info_t *> newStreamInfo;
2820
2821    /*clean up invalid streams*/
2822    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2823            it != mStreamInfo.end();) {
2824        if(((*it)->status) == INVALID){
2825            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
2826            delete channel;
2827            free(*it);
2828            it = mStreamInfo.erase(it);
2829        } else {
2830            it++;
2831        }
2832    }
2833
2834    // Move preview/video/callback/snapshot streams into newList
2835    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2836            it != mStreamInfo.end();) {
2837        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
2838                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
2839            newStreamInfo.push_back(*it);
2840            it = mStreamInfo.erase(it);
2841        } else
2842            it++;
2843    }
2844    // Move raw streams into newList
2845    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2846            it != mStreamInfo.end();) {
2847        newStreamInfo.push_back(*it);
2848        it = mStreamInfo.erase(it);
2849    }
2850
2851    mStreamInfo = newStreamInfo;
2852}
2853
2854/*===========================================================================
2855 * FUNCTION   : extractJpegMetadata
2856 *
2857 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
2858 *              JPEG metadata is cached in HAL, and return as part of capture
2859 *              result when metadata is returned from camera daemon.
2860 *
2861 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
2862 *              @request:      capture request
2863 *
2864 *==========================================================================*/
2865void QCamera3HardwareInterface::extractJpegMetadata(
2866        CameraMetadata& jpegMetadata,
2867        const camera3_capture_request_t *request)
2868{
2869    CameraMetadata frame_settings;
2870    frame_settings = request->settings;
2871
2872    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
2873        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
2874                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
2875                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
2876
2877    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
2878        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
2879                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
2880                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
2881
2882    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
2883        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
2884                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
2885                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
2886
2887    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
2888        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
2889                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
2890                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
2891
2892    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
2893        jpegMetadata.update(ANDROID_JPEG_QUALITY,
2894                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
2895                frame_settings.find(ANDROID_JPEG_QUALITY).count);
2896
2897    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
2898        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
2899                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
2900                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
2901
2902    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
2903        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
2904                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
2905                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
2906}
2907
2908/*===========================================================================
2909 * FUNCTION   : convertToRegions
2910 *
2911 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2912 *
2913 * PARAMETERS :
2914 *   @rect   : cam_rect_t struct to convert
2915 *   @region : int32_t destination array
2916 *   @weight : if we are converting from cam_area_t, weight is valid
2917 *             else weight = -1
2918 *
2919 *==========================================================================*/
2920void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2921    region[0] = rect.left;
2922    region[1] = rect.top;
2923    region[2] = rect.left + rect.width;
2924    region[3] = rect.top + rect.height;
2925    if (weight > -1) {
2926        region[4] = weight;
2927    }
2928}
2929
2930/*===========================================================================
2931 * FUNCTION   : convertFromRegions
2932 *
 * DESCRIPTION: helper method to convert an int32_t region array from the
 *              capture request settings into a cam_area_t (rect + weight)
 *
 * PARAMETERS :
 *   @roi      : destination cam_area_t struct
 *   @settings : capture request settings containing the region tag
 *   @tag      : metadata tag of the 5-element region array
 *               [xmin, ymin, xmax, ymax, weight]
2941 *==========================================================================*/
2942void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2943                                                   const camera_metadata_t *settings,
2944                                                   uint32_t tag){
2945    CameraMetadata frame_settings;
2946    frame_settings = settings;
2947    int32_t x_min = frame_settings.find(tag).data.i32[0];
2948    int32_t y_min = frame_settings.find(tag).data.i32[1];
2949    int32_t x_max = frame_settings.find(tag).data.i32[2];
2950    int32_t y_max = frame_settings.find(tag).data.i32[3];
2951    roi->weight = frame_settings.find(tag).data.i32[4];
2952    roi->rect.left = x_min;
2953    roi->rect.top = y_min;
2954    roi->rect.width = x_max - x_min;
2955    roi->rect.height = y_max - y_min;
2956}
2957
2958/*===========================================================================
2959 * FUNCTION   : resetIfNeededROI
2960 *
2961 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2962 *              crop region
2963 *
2964 * PARAMETERS :
2965 *   @roi       : cam_area_t struct to resize
2966 *   @scalerCropRegion : cam_crop_region_t region to compare against
2967 *
2968 *
2969 *==========================================================================*/
2970bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2971                                                 const cam_crop_region_t* scalerCropRegion)
2972{
2973    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2974    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2975    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2976    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2977    if ((roi_x_max < scalerCropRegion->left) ||
2978        (roi_y_max < scalerCropRegion->top)  ||
2979        (roi->rect.left > crop_x_max) ||
2980        (roi->rect.top > crop_y_max)){
2981        return false;
2982    }
2983    if (roi->rect.left < scalerCropRegion->left) {
2984        roi->rect.left = scalerCropRegion->left;
2985    }
2986    if (roi->rect.top < scalerCropRegion->top) {
2987        roi->rect.top = scalerCropRegion->top;
2988    }
2989    if (roi_x_max > crop_x_max) {
2990        roi_x_max = crop_x_max;
2991    }
2992    if (roi_y_max > crop_y_max) {
2993        roi_y_max = crop_y_max;
2994    }
2995    roi->rect.width = roi_x_max - roi->rect.left;
2996    roi->rect.height = roi_y_max - roi->rect.top;
2997    return true;
2998}
2999
3000/*===========================================================================
3001 * FUNCTION   : convertLandmarks
3002 *
3003 * DESCRIPTION: helper method to extract the landmarks from face detection info
3004 *
 * PARAMETERS :
 *   @face      : cam_face_detection_info_t providing eye/mouth coordinates
 *   @landmarks : int32_t destination array (6 entries: x,y per landmark)
 *
3009 *
3010 *==========================================================================*/
3011void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
3012{
3013    landmarks[0] = face.left_eye_center.x;
3014    landmarks[1] = face.left_eye_center.y;
3015    landmarks[2] = face.right_eye_center.x;
3016    landmarks[3] = face.right_eye_center.y;
3017    landmarks[4] = face.mouth_center.x;
3018    landmarks[5] = face.mouth_center.y;
3019}
3020
3021#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
3022/*===========================================================================
3023 * FUNCTION   : initCapabilities
3024 *
3025 * DESCRIPTION: initialize camera capabilities in static data struct
3026 *
3027 * PARAMETERS :
3028 *   @cameraId  : camera Id
3029 *
3030 * RETURN     : int32_t type of status
3031 *              NO_ERROR  -- success
3032 *              none-zero failure code
3033 *==========================================================================*/
3034int QCamera3HardwareInterface::initCapabilities(int cameraId)
3035{
3036    int rc = 0;
3037    mm_camera_vtbl_t *cameraHandle = NULL;
3038    QCamera3HeapMemory *capabilityHeap = NULL;
3039
3040    cameraHandle = camera_open(cameraId);
3041    if (!cameraHandle) {
3042        ALOGE("%s: camera_open failed", __func__);
3043        rc = -1;
3044        goto open_failed;
3045    }
3046
3047    capabilityHeap = new QCamera3HeapMemory();
3048    if (capabilityHeap == NULL) {
3049        ALOGE("%s: creation of capabilityHeap failed", __func__);
3050        goto heap_creation_failed;
3051    }
3052    /* Allocate memory for capability buffer */
3053    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
3054    if(rc != OK) {
3055        ALOGE("%s: No memory for cappability", __func__);
3056        goto allocate_failed;
3057    }
3058
3059    /* Map memory for capability buffer */
3060    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
3061    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
3062                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
3063                                capabilityHeap->getFd(0),
3064                                sizeof(cam_capability_t));
3065    if(rc < 0) {
3066        ALOGE("%s: failed to map capability buffer", __func__);
3067        goto map_failed;
3068    }
3069
3070    /* Query Capability */
3071    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
3072    if(rc < 0) {
3073        ALOGE("%s: failed to query capability",__func__);
3074        goto query_failed;
3075    }
3076    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
3077    if (!gCamCapability[cameraId]) {
3078        ALOGE("%s: out of memory", __func__);
3079        goto query_failed;
3080    }
3081    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
3082                                        sizeof(cam_capability_t));
3083    rc = 0;
3084
3085query_failed:
3086    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
3087                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
3088map_failed:
3089    capabilityHeap->deallocate();
3090allocate_failed:
3091    delete capabilityHeap;
3092heap_creation_failed:
3093    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
3094    cameraHandle = NULL;
3095open_failed:
3096    return rc;
3097}
3098
3099/*===========================================================================
3100 * FUNCTION   : initParameters
3101 *
3102 * DESCRIPTION: initialize camera parameters
3103 *
3104 * PARAMETERS :
3105 *
3106 * RETURN     : int32_t type of status
3107 *              NO_ERROR  -- success
3108 *              none-zero failure code
3109 *==========================================================================*/
3110int QCamera3HardwareInterface::initParameters()
3111{
3112    int rc = 0;
3113
3114    //Allocate Set Param Buffer
3115    mParamHeap = new QCamera3HeapMemory();
3116    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
3117    if(rc != OK) {
3118        rc = NO_MEMORY;
3119        ALOGE("Failed to allocate SETPARM Heap memory");
3120        delete mParamHeap;
3121        mParamHeap = NULL;
3122        return rc;
3123    }
3124
3125    //Map memory for parameters buffer
3126    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
3127            CAM_MAPPING_BUF_TYPE_PARM_BUF,
3128            mParamHeap->getFd(0),
3129            sizeof(metadata_buffer_t));
3130    if(rc < 0) {
3131        ALOGE("%s:failed to map SETPARM buffer",__func__);
3132        rc = FAILED_TRANSACTION;
3133        mParamHeap->deallocate();
3134        delete mParamHeap;
3135        mParamHeap = NULL;
3136        return rc;
3137    }
3138
3139    mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
3140    return rc;
3141}
3142
3143/*===========================================================================
3144 * FUNCTION   : deinitParameters
3145 *
3146 * DESCRIPTION: de-initialize camera parameters
3147 *
3148 * PARAMETERS :
3149 *
3150 * RETURN     : NONE
3151 *==========================================================================*/
3152void QCamera3HardwareInterface::deinitParameters()
3153{
3154    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
3155            CAM_MAPPING_BUF_TYPE_PARM_BUF);
3156
3157    mParamHeap->deallocate();
3158    delete mParamHeap;
3159    mParamHeap = NULL;
3160
3161    mParameters = NULL;
3162}
3163
3164/*===========================================================================
3165 * FUNCTION   : calcMaxJpegSize
3166 *
3167 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
3168 *
3169 * PARAMETERS :
3170 *
3171 * RETURN     : max_jpeg_size
3172 *==========================================================================*/
3173int QCamera3HardwareInterface::calcMaxJpegSize()
3174{
3175    int32_t max_jpeg_size = 0;
3176    int temp_width, temp_height;
3177    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
3178        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
3179        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
3180        if (temp_width * temp_height > max_jpeg_size ) {
3181            max_jpeg_size = temp_width * temp_height;
3182        }
3183    }
3184    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3185    return max_jpeg_size;
3186}
3187
3188/*===========================================================================
3189 * FUNCTION   : initStaticMetadata
3190 *
3191 * DESCRIPTION: initialize the static metadata
3192 *
3193 * PARAMETERS :
3194 *   @cameraId  : camera Id
3195 *
3196 * RETURN     : int32_t type of status
3197 *              0  -- success
3198 *              non-zero failure code
3199 *==========================================================================*/
3200int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
3201{
3202    int rc = 0;
3203    CameraMetadata staticInfo;
3204
3205    /* android.info: hardware level */
3206    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
3207    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
3208        &supportedHardwareLevel, 1);
3209
3210    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
3211    /*HAL 3 only*/
3212    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3213                    &gCamCapability[cameraId]->min_focus_distance, 1);
3214
3215    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
3216                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
3217
3218    /*should be using focal lengths but sensor doesn't provide that info now*/
3219    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3220                      &gCamCapability[cameraId]->focal_length,
3221                      1);
3222
3223    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3224                      gCamCapability[cameraId]->apertures,
3225                      gCamCapability[cameraId]->apertures_count);
3226
3227    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3228                gCamCapability[cameraId]->filter_densities,
3229                gCamCapability[cameraId]->filter_densities_count);
3230
3231
3232    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3233                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
3234                      gCamCapability[cameraId]->optical_stab_modes_count);
3235
3236    staticInfo.update(ANDROID_LENS_POSITION,
3237                      gCamCapability[cameraId]->lens_position,
3238                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
3239
3240    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
3241                                       gCamCapability[cameraId]->lens_shading_map_size.height};
3242    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
3243                      lens_shading_map_size,
3244                      sizeof(lens_shading_map_size)/sizeof(int32_t));
3245
3246    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
3247            gCamCapability[cameraId]->sensor_physical_size, 2);
3248
3249    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
3250            gCamCapability[cameraId]->exposure_time_range, 2);
3251
3252    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3253            &gCamCapability[cameraId]->max_frame_duration, 1);
3254
3255    camera_metadata_rational baseGainFactor = {
3256            gCamCapability[cameraId]->base_gain_factor.numerator,
3257            gCamCapability[cameraId]->base_gain_factor.denominator};
3258    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
3259                      &baseGainFactor, 1);
3260
3261    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3262                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
3263
3264    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
3265                                  gCamCapability[cameraId]->pixel_array_size.height};
3266    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3267                      pixel_array_size, 2);
3268
3269    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
3270                                                gCamCapability[cameraId]->active_array_size.top,
3271                                                gCamCapability[cameraId]->active_array_size.width,
3272                                                gCamCapability[cameraId]->active_array_size.height};
3273    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3274                      active_array_size, 4);
3275
3276    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
3277            &gCamCapability[cameraId]->white_level, 1);
3278
3279    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
3280            gCamCapability[cameraId]->black_level_pattern, 4);
3281
3282    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
3283                      &gCamCapability[cameraId]->flash_charge_duration, 1);
3284
3285    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
3286                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
3287
3288    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
3289    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
3290                      (int32_t*)&maxFaces, 1);
3291
3292    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3293                      &gCamCapability[cameraId]->histogram_size, 1);
3294
3295    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3296            &gCamCapability[cameraId]->max_histogram_count, 1);
3297
3298    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
3299                                    gCamCapability[cameraId]->sharpness_map_size.height};
3300
3301    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
3302            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
3303
3304    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3305            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
3306
3307    int32_t scalar_formats[] = {
3308            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
3309            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
3310            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
3311            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
3312            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
3313    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
3314    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
3315                      scalar_formats,
3316                      scalar_formats_count);
3317
3318    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
3319    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
3320              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
3321              available_processed_sizes);
3322    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
3323                available_processed_sizes,
3324                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
3325
3326    int32_t available_raw_sizes[CAM_FORMAT_MAX * 2];
3327    makeTable(gCamCapability[cameraId]->raw_dim,
3328              gCamCapability[cameraId]->supported_raw_dim_cnt,
3329              available_raw_sizes);
3330    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
3331                available_raw_sizes,
3332                gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
3333
3334    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
3335    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
3336                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
3337                 available_fps_ranges);
3338    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3339            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
3340
3341    camera_metadata_rational exposureCompensationStep = {
3342            gCamCapability[cameraId]->exp_compensation_step.numerator,
3343            gCamCapability[cameraId]->exp_compensation_step.denominator};
3344    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
3345                      &exposureCompensationStep, 1);
3346
3347    /*TO DO*/
3348    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
3349    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3350                      availableVstabModes, sizeof(availableVstabModes));
3351
3352    /** Quirk for urgent 3A state until final interface is worked out */
3353    uint8_t usePartialResultQuirk = 1;
3354    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
3355                      &usePartialResultQuirk, 1);
3356
3357    /*HAL 1 and HAL 3 common*/
3358    float maxZoom = 4;
3359    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3360            &maxZoom, 1);
3361
3362    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
3363    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
3364        max3aRegions[2] = 0; /* AF not supported */
3365    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
3366            max3aRegions, 3);
3367
3368    uint8_t availableFaceDetectModes[] = {
3369            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
3370            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
3371    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3372                      availableFaceDetectModes,
3373                      sizeof(availableFaceDetectModes));
3374
3375    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
3376                                           gCamCapability[cameraId]->exposure_compensation_max};
3377    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
3378            exposureCompensationRange,
3379            sizeof(exposureCompensationRange)/sizeof(int32_t));
3380
3381    uint8_t lensFacing = (facingBack) ?
3382            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
3383    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
3384
3385    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
3386                available_processed_sizes,
3387                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
3388
3389    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3390                      available_thumbnail_sizes,
3391                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
3392
3393    /*android.scaler.availableStreamConfigurations*/
3394    int32_t max_stream_configs_size =
3395            gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3396            sizeof(scalar_formats)/sizeof(int32_t) * 4;
3397    int32_t available_stream_configs[max_stream_configs_size];
3398    int idx = 0;
3399    for (int j = 0; j < scalar_formats_count; j++) {
3400        switch (scalar_formats[j]) {
3401        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3402        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3403            for (int i = 0;
3404                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3405                available_stream_configs[idx] = scalar_formats[j];
3406                available_stream_configs[idx+1] =
3407                    gCamCapability[cameraId]->raw_dim[i].width;
3408                available_stream_configs[idx+2] =
3409                    gCamCapability[cameraId]->raw_dim[i].height;
3410                available_stream_configs[idx+3] =
3411                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3412                idx+=4;
3413            }
3414            break;
3415        default:
3416            for (int i = 0;
3417                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3418                available_stream_configs[idx] = scalar_formats[j];
3419                available_stream_configs[idx+1] =
3420                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3421                available_stream_configs[idx+2] =
3422                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3423                available_stream_configs[idx+3] =
3424                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3425                idx+=4;
3426            }
3427
3428
3429            break;
3430        }
3431    }
3432    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3433                      available_stream_configs, idx);
3434
3435    /* android.scaler.availableMinFrameDurations */
3436    int64_t available_min_durations[max_stream_configs_size];
3437    idx = 0;
3438    for (int j = 0; j < scalar_formats_count; j++) {
3439        switch (scalar_formats[j]) {
3440        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3441        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3442            for (int i = 0;
3443                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3444                available_min_durations[idx] = scalar_formats[j];
3445                available_min_durations[idx+1] =
3446                    gCamCapability[cameraId]->raw_dim[i].width;
3447                available_min_durations[idx+2] =
3448                    gCamCapability[cameraId]->raw_dim[i].height;
3449                available_min_durations[idx+3] =
3450                    gCamCapability[cameraId]->raw_min_duration[i];
3451                idx+=4;
3452            }
3453            break;
3454        default:
3455            for (int i = 0;
3456                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3457                available_min_durations[idx] = scalar_formats[j];
3458                available_min_durations[idx+1] =
3459                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3460                available_min_durations[idx+2] =
3461                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3462                available_min_durations[idx+3] =
3463                    gCamCapability[cameraId]->picture_min_duration[i];
3464                idx+=4;
3465            }
3466            break;
3467        }
3468    }
3469    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
3470                      &available_min_durations[0], idx);
3471
3472    int32_t max_jpeg_size = 0;
3473    int temp_width, temp_height;
3474    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3475        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3476        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3477        if (temp_width * temp_height > max_jpeg_size ) {
3478            max_jpeg_size = temp_width * temp_height;
3479        }
3480    }
3481    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3482    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
3483                      &max_jpeg_size, 1);
3484
3485    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
3486    size_t size = 0;
3487    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
3488        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
3489                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3490                                   gCamCapability[cameraId]->supported_effects[i]);
3491        if (val != NAME_NOT_FOUND) {
3492            avail_effects[size] = (uint8_t)val;
3493            size++;
3494        }
3495    }
3496    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
3497                      avail_effects,
3498                      size);
3499
3500    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
3501    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
3502    int32_t supported_scene_modes_cnt = 0;
3503    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
3504        int32_t val = lookupFwkName(SCENE_MODES_MAP,
3505                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3506                                gCamCapability[cameraId]->supported_scene_modes[i]);
3507        if (val != NAME_NOT_FOUND) {
3508            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
3509            supported_indexes[supported_scene_modes_cnt] = i;
3510            supported_scene_modes_cnt++;
3511        }
3512    }
3513
3514    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3515                      avail_scene_modes,
3516                      supported_scene_modes_cnt);
3517
3518    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
3519    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
3520                      supported_scene_modes_cnt,
3521                      scene_mode_overrides,
3522                      supported_indexes,
3523                      cameraId);
3524    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
3525                      scene_mode_overrides,
3526                      supported_scene_modes_cnt*3);
3527
3528    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
3529    size = 0;
3530    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
3531        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
3532                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3533                                 gCamCapability[cameraId]->supported_antibandings[i]);
3534        if (val != NAME_NOT_FOUND) {
3535            avail_antibanding_modes[size] = (uint8_t)val;
3536            size++;
3537        }
3538
3539    }
3540    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3541                      avail_antibanding_modes,
3542                      size);
3543
3544    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
3545    size = 0;
3546    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
3547        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
3548                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3549                                gCamCapability[cameraId]->supported_focus_modes[i]);
3550        if (val != NAME_NOT_FOUND) {
3551            avail_af_modes[size] = (uint8_t)val;
3552            size++;
3553        }
3554    }
3555    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
3556                      avail_af_modes,
3557                      size);
3558
3559    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
3560    size = 0;
3561    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
3562        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
3563                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3564                                    gCamCapability[cameraId]->supported_white_balances[i]);
3565        if (val != NAME_NOT_FOUND) {
3566            avail_awb_modes[size] = (uint8_t)val;
3567            size++;
3568        }
3569    }
3570    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
3571                      avail_awb_modes,
3572                      size);
3573
3574    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
3575    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
3576      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
3577
3578    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
3579            available_flash_levels,
3580            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
3581
3582    uint8_t flashAvailable;
3583    if (gCamCapability[cameraId]->flash_available)
3584        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
3585    else
3586        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
3587    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
3588            &flashAvailable, 1);
3589
3590    uint8_t avail_ae_modes[5];
3591    size = 0;
3592    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
3593        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
3594        size++;
3595    }
3596    if (flashAvailable) {
3597        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
3598        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
3599        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3600    }
3601    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
3602                      avail_ae_modes,
3603                      size);
3604
3605    int32_t sensitivity_range[2];
3606    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
3607    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
3608    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
3609                      sensitivity_range,
3610                      sizeof(sensitivity_range) / sizeof(int32_t));
3611
3612    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3613                      &gCamCapability[cameraId]->max_analog_sensitivity,
3614                      1);
3615
3616    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
3617    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
3618                      &sensor_orientation,
3619                      1);
3620
3621    int32_t max_output_streams[3] = {1, 3, 1};
3622    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
3623                      max_output_streams,
3624                      3);
3625
3626    uint8_t avail_leds = 0;
3627    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
3628                      &avail_leds, 0);
3629
3630    uint8_t focus_dist_calibrated;
3631    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
3632            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
3633            gCamCapability[cameraId]->focus_dist_calibrated);
3634    if (val != NAME_NOT_FOUND) {
3635        focus_dist_calibrated = (uint8_t)val;
3636        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3637                     &focus_dist_calibrated, 1);
3638    }
3639
3640    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
3641    size = 0;
3642    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
3643            i++) {
3644        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
3645                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3646                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
3647        if (val != NAME_NOT_FOUND) {
3648            avail_testpattern_modes[size] = val;
3649            size++;
3650        }
3651    }
3652    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3653                      avail_testpattern_modes,
3654                      size);
3655
3656    uint8_t max_pipeline_depth = kMaxInFlight + EMPTY_PIPELINE_DELAY;
3657    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
3658                      &max_pipeline_depth,
3659                      1);
3660
3661    int32_t partial_result_count = 2;
3662    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3663                      &partial_result_count,
3664                       1);
3665
3666    uint8_t available_capabilities[] =
3667        {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
3668         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
3669         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING};
3670    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3671                      available_capabilities,
3672                      3);
3673
3674    int32_t max_input_streams = 0;
3675    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3676                      &max_input_streams,
3677                      1);
3678
3679    int32_t io_format_map[] = {};
3680    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3681                      io_format_map, 0);
3682
3683    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
3684    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
3685                      &max_latency,
3686                      1);
3687
3688    float optical_axis_angle[2];
3689    optical_axis_angle[0] = 0; //need to verify
3690    optical_axis_angle[1] = 0; //need to verify
3691    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
3692                      optical_axis_angle,
3693                      2);
3694
3695    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
3696    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3697                      available_hot_pixel_modes,
3698                      1);
3699
3700    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
3701                                      ANDROID_EDGE_MODE_FAST};
3702    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3703                      available_edge_modes,
3704                      2);
3705
3706    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
3707                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
3708    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3709                      available_noise_red_modes,
3710                      2);
3711
3712    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
3713                                         ANDROID_TONEMAP_MODE_FAST};
3714    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3715                      available_tonemap_modes,
3716                      2);
3717
3718    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
3719    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3720                      available_hot_pixel_map_modes,
3721                      1);
3722
3723    uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3724        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3725        gCamCapability[cameraId]->reference_illuminant1);
3726    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
3727                      &fwkReferenceIlluminant, 1);
3728
3729    fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3730        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3731        gCamCapability[cameraId]->reference_illuminant2);
3732    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
3733                      &fwkReferenceIlluminant, 1);
3734
3735    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
3736                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
3737                      3*3);
3738
3739    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
3740                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
3741                      3*3);
3742
3743    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
3744                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
3745                      3*3);
3746
3747    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
3748                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
3749                      3*3);
3750
3751    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
3752                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
3753                      3*3);
3754
3755    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
3756                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
3757                      3*3);
3758
3759
3760    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
3761       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
3762       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3763       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
3764       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3765       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
3766       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
3767       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3768       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
3769       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
3770       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
3771       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3772       ANDROID_JPEG_GPS_COORDINATES,
3773       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
3774       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
3775       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
3776       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3777       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
3778       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
3779       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
3780       ANDROID_SENSOR_FRAME_DURATION,
3781       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
3782       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
3783       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3784       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
3785       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3786       ANDROID_BLACK_LEVEL_LOCK };
3787    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
3788                      available_request_keys,
3789                      sizeof(available_request_keys)/sizeof(int32_t));
3790
3791    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
3792       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
3793       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
3794       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3795       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
3796       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3797       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
3798       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
3799       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
3800       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3801       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
3802       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
3803       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
3804       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
3805       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3806       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
3807       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3808       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
3809       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3810       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3811       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
3812       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
3813       ANDROID_STATISTICS_FACE_SCORES};
3814    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3815                      available_result_keys,
3816                      sizeof(available_result_keys)/sizeof(int32_t));
3817
3818    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3819       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3820       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
3821       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
3822       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3823       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3824       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
3825       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
3826       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3827       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3828       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3829       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3830       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3831       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3832       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3833       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
3834       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3835       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3836       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3837       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3838       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3839       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3840       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3841       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
3842       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
3843       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
3844       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
3845       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
3846       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3847       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3848       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3849       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3850       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
3851       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3852       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3853       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3854       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3855       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3856       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3857       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3858       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3859       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3860       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3861       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3862       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
3863    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
3864                      available_characteristics_keys,
3865                      sizeof(available_characteristics_keys)/sizeof(int32_t));
3866
3867    /*available stall durations depend on the hw + sw and will be different for different devices */
3868    /*have to add for raw after implementation*/
3869    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
3870    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
3871
3872    size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
3873    int64_t available_stall_durations[available_stall_size];
3874    idx = 0;
3875    for (uint32_t j = 0; j < stall_formats_count; j++) {
3876       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
3877          for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3878             available_stall_durations[idx]   = stall_formats[j];
3879             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3880             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3881             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
3882             idx+=4;
3883          }
3884       } else {
3885          for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3886             available_stall_durations[idx]   = stall_formats[j];
3887             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
3888             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
3889             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
3890             idx+=4;
3891          }
3892       }
3893    }
3894    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
3895                      available_stall_durations,
3896                      idx);
3897    //QCAMERA3_OPAQUE_RAW
3898    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
3899    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
3900    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
3901    case LEGACY_RAW:
3902        if (gCamCapability[cameraId]->white_level == (1<<8)-1)
3903            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
3904        else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
3905            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
3906        else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
3907            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
3908        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
3909        break;
3910    case MIPI_RAW:
3911        if (gCamCapability[cameraId]->white_level == (1<<8)-1)
3912            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
3913        else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
3914            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
3915        else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
3916            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
3917        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
3918        break;
3919    default:
3920        ALOGE("%s: unknown opaque_raw_format %d", __func__,
3921                gCamCapability[cameraId]->opaque_raw_fmt);
3922        break;
3923    }
3924    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
3925
3926    int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
3927    for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3928        cam_stream_buf_plane_info_t buf_planes;
3929        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
3930        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
3931        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
3932            &gCamCapability[cameraId]->padding_info, &buf_planes);
3933        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
3934    }
3935    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
3936            3*gCamCapability[cameraId]->supported_raw_dim_cnt);
3937
3938    gStaticMetadata[cameraId] = staticInfo.release();
3939    return rc;
3940}
3941
3942/*===========================================================================
3943 * FUNCTION   : makeTable
3944 *
3945 * DESCRIPTION: make a table of sizes
3946 *
3947 * PARAMETERS :
3948 *
3949 *
3950 *==========================================================================*/
3951void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
3952                                          int32_t* sizeTable)
3953{
3954    int j = 0;
3955    for (int i = 0; i < size; i++) {
3956        sizeTable[j] = dimTable[i].width;
3957        sizeTable[j+1] = dimTable[i].height;
3958        j+=2;
3959    }
3960}
3961
3962/*===========================================================================
3963 * FUNCTION   : makeFPSTable
3964 *
3965 * DESCRIPTION: make a table of fps ranges
3966 *
3967 * PARAMETERS :
3968 *
3969 *==========================================================================*/
3970void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
3971                                          int32_t* fpsRangesTable)
3972{
3973    int j = 0;
3974    for (int i = 0; i < size; i++) {
3975        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
3976        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
3977        j+=2;
3978    }
3979}
3980
3981/*===========================================================================
3982 * FUNCTION   : makeOverridesList
3983 *
3984 * DESCRIPTION: make a list of scene mode overrides
3985 *
3986 * PARAMETERS :
3987 *
3988 *
3989 *==========================================================================*/
3990void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
3991                                                  uint8_t size, uint8_t* overridesList,
3992                                                  uint8_t* supported_indexes,
3993                                                  int camera_id)
3994{
3995    /*daemon will give a list of overrides for all scene modes.
3996      However we should send the fwk only the overrides for the scene modes
3997      supported by the framework*/
3998    int j = 0, index = 0, supt = 0;
3999    uint8_t focus_override;
4000    for (int i = 0; i < size; i++) {
4001        supt = 0;
4002        index = supported_indexes[i];
4003        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
4004        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
4005                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
4006                                                    overridesTable[index].awb_mode);
4007        focus_override = (uint8_t)overridesTable[index].af_mode;
4008        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
4009           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
4010              supt = 1;
4011              break;
4012           }
4013        }
4014        if (supt) {
4015           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
4016                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
4017                                              focus_override);
4018        } else {
4019           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
4020        }
4021        j+=3;
4022    }
4023}
4024
4025/*===========================================================================
 * FUNCTION   : getScalarFormat
4027 *
4028 * DESCRIPTION: convert the format to type recognized by framework
4029 *
4030 * PARAMETERS : format : the format from backend
4031 *
 * RETURN     : format recognized by framework
4033 *
4034 *==========================================================================*/
4035int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
4036{
4037    int32_t halPixelFormat;
4038
4039    switch (format) {
4040    case CAM_FORMAT_YUV_420_NV12:
4041        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4042        break;
4043    case CAM_FORMAT_YUV_420_NV21:
4044        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4045        break;
4046    case CAM_FORMAT_YUV_420_NV21_ADRENO:
4047        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
4048        break;
4049    case CAM_FORMAT_YUV_420_YV12:
4050        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
4051        break;
4052    case CAM_FORMAT_YUV_422_NV16:
4053    case CAM_FORMAT_YUV_422_NV61:
4054    default:
4055        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4056        break;
4057    }
4058    return halPixelFormat;
4059}
4060
4061/*===========================================================================
4062 * FUNCTION   : getSensorSensitivity
4063 *
4064 * DESCRIPTION: convert iso_mode to an integer value
4065 *
4066 * PARAMETERS : iso_mode : the iso_mode supported by sensor
4067 *
4068 ** RETURN    : sensitivity supported by sensor
4069 *
4070 *==========================================================================*/
4071int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
4072{
4073    int32_t sensitivity;
4074
4075    switch (iso_mode) {
4076    case CAM_ISO_MODE_100:
4077        sensitivity = 100;
4078        break;
4079    case CAM_ISO_MODE_200:
4080        sensitivity = 200;
4081        break;
4082    case CAM_ISO_MODE_400:
4083        sensitivity = 400;
4084        break;
4085    case CAM_ISO_MODE_800:
4086        sensitivity = 800;
4087        break;
4088    case CAM_ISO_MODE_1600:
4089        sensitivity = 1600;
4090        break;
4091    default:
4092        sensitivity = -1;
4093        break;
4094    }
4095    return sensitivity;
4096}
4097
4098/*===========================================================================
4099 * FUNCTION   : AddSetMetaEntryToBatch
4100 *
4101 * DESCRIPTION: add set parameter entry into batch
4102 *
4103 * PARAMETERS :
4104 *   @p_table     : ptr to parameter buffer
4105 *   @paramType   : parameter type
4106 *   @paramLength : length of parameter value
4107 *   @paramValue  : ptr to parameter value
4108 *
4109 * RETURN     : int32_t type of status
4110 *              NO_ERROR  -- success
4111 *              none-zero failure code
4112 *==========================================================================*/
4113int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
4114                                                          unsigned int paramType,
4115                                                          uint32_t paramLength,
4116                                                          void *paramValue)
4117{
4118    int position = paramType;
4119    int current, next;
4120
4121    /*************************************************************************
4122    *                 Code to take care of linking next flags                *
4123    *************************************************************************/
4124    current = GET_FIRST_PARAM_ID(p_table);
4125    if (position == current){
4126        //DO NOTHING
4127    } else if (position < current){
4128        SET_NEXT_PARAM_ID(position, p_table, current);
4129        SET_FIRST_PARAM_ID(p_table, position);
4130    } else {
4131        /* Search for the position in the linked list where we need to slot in*/
4132        while (position > GET_NEXT_PARAM_ID(current, p_table))
4133            current = GET_NEXT_PARAM_ID(current, p_table);
4134
4135        /*If node already exists no need to alter linking*/
4136        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
4137            next = GET_NEXT_PARAM_ID(current, p_table);
4138            SET_NEXT_PARAM_ID(current, p_table, position);
4139            SET_NEXT_PARAM_ID(position, p_table, next);
4140        }
4141    }
4142
4143    /*************************************************************************
4144    *                   Copy contents into entry                             *
4145    *************************************************************************/
4146
4147    if (paramLength > sizeof(parm_type_t)) {
4148        ALOGE("%s:Size of input larger than max entry size",__func__);
4149        return BAD_VALUE;
4150    }
4151    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
4152    SET_PARM_VALID_BIT(paramType,p_table,1);
4153    return NO_ERROR;
4154}
4155
4156/*===========================================================================
4157 * FUNCTION   : lookupFwkName
4158 *
4159 * DESCRIPTION: In case the enum is not same in fwk and backend
4160 *              make sure the parameter is correctly propogated
4161 *
4162 * PARAMETERS  :
4163 *   @arr      : map between the two enums
4164 *   @len      : len of the map
4165 *   @hal_name : name of the hal_parm to map
4166 *
4167 * RETURN     : int type of status
4168 *              fwk_name  -- success
4169 *              none-zero failure code
4170 *==========================================================================*/
4171int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
4172                                             int len, int hal_name)
4173{
4174
4175    for (int i = 0; i < len; i++) {
4176        if (arr[i].hal_name == hal_name)
4177            return arr[i].fwk_name;
4178    }
4179
4180    /* Not able to find matching framework type is not necessarily
4181     * an error case. This happens when mm-camera supports more attributes
4182     * than the frameworks do */
4183    ALOGD("%s: Cannot find matching framework type", __func__);
4184    return NAME_NOT_FOUND;
4185}
4186
4187/*===========================================================================
4188 * FUNCTION   : lookupHalName
4189 *
4190 * DESCRIPTION: In case the enum is not same in fwk and backend
4191 *              make sure the parameter is correctly propogated
4192 *
4193 * PARAMETERS  :
4194 *   @arr      : map between the two enums
4195 *   @len      : len of the map
4196 *   @fwk_name : name of the hal_parm to map
4197 *
4198 * RETURN     : int32_t type of status
4199 *              hal_name  -- success
4200 *              none-zero failure code
4201 *==========================================================================*/
4202int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
4203                                             int len, unsigned int fwk_name)
4204{
4205    for (int i = 0; i < len; i++) {
4206       if (arr[i].fwk_name == fwk_name)
4207           return arr[i].hal_name;
4208    }
4209    ALOGE("%s: Cannot find matching hal type", __func__);
4210    return NAME_NOT_FOUND;
4211}
4212
4213/*===========================================================================
4214 * FUNCTION   : getCapabilities
4215 *
4216 * DESCRIPTION: query camera capabilities
4217 *
4218 * PARAMETERS :
4219 *   @cameraId  : camera Id
4220 *   @info      : camera info struct to be filled in with camera capabilities
4221 *
4222 * RETURN     : int32_t type of status
4223 *              NO_ERROR  -- success
4224 *              none-zero failure code
4225 *==========================================================================*/
4226int QCamera3HardwareInterface::getCamInfo(int cameraId,
4227                                    struct camera_info *info)
4228{
4229    int rc = 0;
4230
4231    if (NULL == gCamCapability[cameraId]) {
4232        rc = initCapabilities(cameraId);
4233        if (rc < 0) {
4234            //pthread_mutex_unlock(&g_camlock);
4235            return rc;
4236        }
4237    }
4238
4239    if (NULL == gStaticMetadata[cameraId]) {
4240        rc = initStaticMetadata(cameraId);
4241        if (rc < 0) {
4242            return rc;
4243        }
4244    }
4245
4246    switch(gCamCapability[cameraId]->position) {
4247    case CAM_POSITION_BACK:
4248        info->facing = CAMERA_FACING_BACK;
4249        break;
4250
4251    case CAM_POSITION_FRONT:
4252        info->facing = CAMERA_FACING_FRONT;
4253        break;
4254
4255    default:
4256        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
4257        rc = -1;
4258        break;
4259    }
4260
4261
4262    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
4263    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
4264    info->static_camera_characteristics = gStaticMetadata[cameraId];
4265
4266    return rc;
4267}
4268
4269/*===========================================================================
4270 * FUNCTION   : translateCapabilityToMetadata
4271 *
4272 * DESCRIPTION: translate the capability into camera_metadata_t
4273 *
4274 * PARAMETERS : type of the request
4275 *
4276 *
4277 * RETURN     : success: camera_metadata_t*
4278 *              failure: NULL
4279 *
4280 *==========================================================================*/
4281camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
4282{
4283    pthread_mutex_lock(&mMutex);
4284
4285    if (mDefaultMetadata[type] != NULL) {
4286        pthread_mutex_unlock(&mMutex);
4287        return mDefaultMetadata[type];
4288    }
4289    //first time we are handling this request
4290    //fill up the metadata structure using the wrapper class
4291    CameraMetadata settings;
4292    //translate from cam_capability_t to camera_metadata_tag_t
4293    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
4294    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
4295    int32_t defaultRequestID = 0;
4296    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
4297
4298    uint8_t controlIntent = 0;
4299    uint8_t focusMode;
4300    switch (type) {
4301      case CAMERA3_TEMPLATE_PREVIEW:
4302        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
4303        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4304        break;
4305      case CAMERA3_TEMPLATE_STILL_CAPTURE:
4306        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
4307        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4308        break;
4309      case CAMERA3_TEMPLATE_VIDEO_RECORD:
4310        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
4311        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4312        break;
4313      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
4314        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
4315        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4316        break;
4317      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
4318        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
4319        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4320        break;
4321      case CAMERA3_TEMPLATE_MANUAL:
4322        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
4323        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4324        break;
4325      default:
4326        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
4327        break;
4328    }
4329    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
4330
4331    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
4332        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4333    }
4334    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
4335
4336    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
4337            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
4338
4339    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
4340    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4341
4342    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
4343    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
4344
4345    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
4346    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
4347
4348    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
4349    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
4350
4351    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
4352    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
4353
4354    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
4355    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
4356
4357    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
4358    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
4359
4360    /*flash*/
4361    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
4362    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
4363
4364    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
4365    settings.update(ANDROID_FLASH_FIRING_POWER,
4366            &flashFiringLevel, 1);
4367
4368    /* lens */
4369    float default_aperture = gCamCapability[mCameraId]->apertures[0];
4370    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
4371
4372    if (gCamCapability[mCameraId]->filter_densities_count) {
4373        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
4374        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
4375                        gCamCapability[mCameraId]->filter_densities_count);
4376    }
4377
4378    float default_focal_length = gCamCapability[mCameraId]->focal_length;
4379    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
4380
4381    float default_focus_distance = 0;
4382    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
4383
4384    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
4385    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
4386
4387    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
4388    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
4389
4390    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
4391    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
4392
4393    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
4394    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
4395
4396    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
4397    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
4398
4399    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
4400    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
4401
4402    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4403    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4404
4405    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4406    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
4407
4408    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4409    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
4410
4411    /* Exposure time(Update the Min Exposure Time)*/
4412    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
4413    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
4414
4415    /* frame duration */
4416    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
4417    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
4418
4419    /* sensitivity */
4420    static const int32_t default_sensitivity = 100;
4421    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
4422
4423    /*edge mode*/
4424    static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
4425    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
4426
4427    /*noise reduction mode*/
4428    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
4429    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
4430
4431    /*color correction mode*/
4432    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
4433    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
4434
4435    /*transform matrix mode*/
4436    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
4437    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
4438
4439    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
4440    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
4441
4442    int32_t scaler_crop_region[4];
4443    scaler_crop_region[0] = 0;
4444    scaler_crop_region[1] = 0;
4445    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
4446    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
4447    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
4448
4449    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
4450    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
4451
4452    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4453    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
4454
4455    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
4456                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
4457                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
4458    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
4459
4460    /*focus distance*/
4461    float focus_distance = 0.0;
4462    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
4463
4464    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
4465    float max_range = 0.0;
4466    float max_fixed_fps = 0.0;
4467    int32_t fps_range[2] = {0, 0};
4468    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
4469            i++) {
4470        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
4471            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4472        if (type == CAMERA3_TEMPLATE_PREVIEW ||
4473                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
4474                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
4475            if (range > max_range) {
4476                fps_range[0] =
4477                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4478                fps_range[1] =
4479                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4480                max_range = range;
4481            }
4482        } else {
4483            if (range < 0.01 && max_fixed_fps <
4484                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
4485                fps_range[0] =
4486                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4487                fps_range[1] =
4488                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4489                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4490            }
4491        }
4492    }
4493    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
4494
4495    /*precapture trigger*/
4496    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
4497    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
4498
4499    /*af trigger*/
4500    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
4501    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
4502
4503    /* ae & af regions */
4504    int32_t active_region[] = {
4505            gCamCapability[mCameraId]->active_array_size.left,
4506            gCamCapability[mCameraId]->active_array_size.top,
4507            gCamCapability[mCameraId]->active_array_size.left +
4508                    gCamCapability[mCameraId]->active_array_size.width,
4509            gCamCapability[mCameraId]->active_array_size.top +
4510                    gCamCapability[mCameraId]->active_array_size.height,
4511            1};
4512    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
4513    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
4514
4515    /* black level lock */
4516    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4517    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
4518
4519    /* face detect mode */
4520    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
4521    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
4522
4523    /* lens shading map mode */
4524    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4525    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
4526
4527    //special defaults for manual template
4528    if (type == CAMERA3_TEMPLATE_MANUAL) {
4529        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
4530        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
4531
4532        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
4533        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
4534
4535        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
4536        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
4537
4538        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
4539        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
4540
4541        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
4542        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
4543
4544        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
4545        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
4546    }
4547    mDefaultMetadata[type] = settings.release();
4548
4549    pthread_mutex_unlock(&mMutex);
4550    return mDefaultMetadata[type];
4551}
4552
4553/*===========================================================================
4554 * FUNCTION   : setFrameParameters
4555 *
4556 * DESCRIPTION: set parameters per frame as requested in the metadata from
4557 *              framework
4558 *
4559 * PARAMETERS :
4560 *   @request   : request that needs to be serviced
4561 *   @streamID : Stream ID of all the requested streams
4562 *
4563 * RETURN     : success: NO_ERROR
4564 *              failure:
4565 *==========================================================================*/
4566int QCamera3HardwareInterface::setFrameParameters(
4567                    camera3_capture_request_t *request,
4568                    cam_stream_ID_t streamID)
4569{
4570    /*translate from camera_metadata_t type to parm_type_t*/
4571    int rc = 0;
4572    int32_t hal_version = CAM_HAL_V3;
4573
4574    memset(mParameters, 0, sizeof(metadata_buffer_t));
4575    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
4576    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
4577                sizeof(hal_version), &hal_version);
4578    if (rc < 0) {
4579        ALOGE("%s: Failed to set hal version in the parameters", __func__);
4580        return BAD_VALUE;
4581    }
4582
4583    /*we need to update the frame number in the parameters*/
4584    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
4585                                sizeof(request->frame_number), &(request->frame_number));
4586    if (rc < 0) {
4587        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4588        return BAD_VALUE;
4589    }
4590
4591    /* Update stream id of all the requested buffers */
4592    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
4593                                sizeof(cam_stream_ID_t), &streamID);
4594
4595    if (rc < 0) {
4596        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
4597        return BAD_VALUE;
4598    }
4599
4600    if(request->settings != NULL){
4601        rc = translateToHalMetadata(request, mParameters);
4602    }
4603
4604    /*set the parameters to backend*/
4605    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
4606    return rc;
4607}
4608
4609/*===========================================================================
4610 * FUNCTION   : setReprocParameters
4611 *
4612 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
4613 *              queue it to picture channel for reprocessing.
4614 *
4615 * PARAMETERS :
4616 *   @request   : request that needs to be serviced
4617 *
4618 * RETURN     : success: NO_ERROR
4619 *              failure: non zero failure code
4620 *==========================================================================*/
4621int QCamera3HardwareInterface::setReprocParameters(
4622        camera3_capture_request_t *request)
4623{
4624    /*translate from camera_metadata_t type to parm_type_t*/
4625    int rc = 0;
4626    metadata_buffer_t *reprocParam = NULL;
4627
4628    if(request->settings != NULL){
4629        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
4630        return BAD_VALUE;
4631    }
4632    reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
4633    if (!reprocParam) {
4634        ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
4635        return NO_MEMORY;
4636    }
4637    memset(reprocParam, 0, sizeof(metadata_buffer_t));
4638    reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
4639
4640    /*we need to update the frame number in the parameters*/
4641    rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
4642                                sizeof(request->frame_number), &(request->frame_number));
4643    if (rc < 0) {
4644        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4645        return BAD_VALUE;
4646    }
4647
4648
4649    rc = translateToHalMetadata(request, reprocParam);
4650    if (rc < 0) {
4651        ALOGE("%s: Failed to translate reproc request", __func__);
4652        delete reprocParam;
4653        return rc;
4654    }
4655    /*queue metadata for reprocessing*/
4656    rc = mPictureChannel->queueReprocMetadata(reprocParam);
4657    if (rc < 0) {
4658        ALOGE("%s: Failed to queue reprocessing metadata", __func__);
4659        delete reprocParam;
4660    }
4661
4662    return rc;
4663}
4664
4665/*===========================================================================
4666 * FUNCTION   : translateToHalMetadata
4667 *
4668 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
4669 *
4670 *
4671 * PARAMETERS :
4672 *   @request  : request sent from framework
4673 *
4674 *
4675 * RETURN     : success: NO_ERROR
4676 *              failure:
4677 *==========================================================================*/
4678int QCamera3HardwareInterface::translateToHalMetadata
4679                                  (const camera3_capture_request_t *request,
4680                                   metadata_buffer_t *hal_metadata)
4681{
4682    int rc = 0;
4683    CameraMetadata frame_settings;
4684    frame_settings = request->settings;
4685
4686    /* Do not change the order of the following list unless you know what you are
4687     * doing.
4688     * The order is laid out in such a way that parameters in the front of the table
4689     * may be used to override the parameters later in the table. Examples are:
4690     * 1. META_MODE should precede AEC/AWB/AF MODE
4691     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
4692     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
4693     * 4. Any mode should precede it's corresponding settings
4694     */
4695    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4696        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4697        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_MODE,
4698                sizeof(metaMode), &metaMode);
4699        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4700           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4701           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4702                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4703                                             fwk_sceneMode);
4704           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4705                sizeof(sceneMode), &sceneMode);
4706        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4707           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4708           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4709                sizeof(sceneMode), &sceneMode);
4710        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4711           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4712           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4713                sizeof(sceneMode), &sceneMode);
4714        }
4715    }
4716
4717    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4718        uint8_t fwk_aeMode =
4719            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4720        uint8_t aeMode;
4721        int32_t redeye;
4722
4723        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4724            aeMode = CAM_AE_MODE_OFF;
4725        } else {
4726            aeMode = CAM_AE_MODE_ON;
4727        }
4728        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4729            redeye = 1;
4730        } else {
4731            redeye = 0;
4732        }
4733
4734        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
4735                                          sizeof(AE_FLASH_MODE_MAP),
4736                                          fwk_aeMode);
4737        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
4738                sizeof(aeMode), &aeMode);
4739        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4740                sizeof(flashMode), &flashMode);
4741        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
4742                sizeof(redeye), &redeye);
4743    }
4744
4745    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
4746        uint8_t fwk_whiteLevel =
4747            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
4748        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
4749                sizeof(WHITE_BALANCE_MODES_MAP),
4750                fwk_whiteLevel);
4751        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
4752                sizeof(whiteLevel), &whiteLevel);
4753    }
4754
4755    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
4756        uint8_t fwk_focusMode =
4757            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
4758        uint8_t focusMode;
4759        focusMode = lookupHalName(FOCUS_MODES_MAP,
4760                                   sizeof(FOCUS_MODES_MAP),
4761                                   fwk_focusMode);
4762        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
4763                sizeof(focusMode), &focusMode);
4764    }
4765
4766    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
4767        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
4768        rc = AddSetMetaEntryToBatch(hal_metadata,
4769                CAM_INTF_META_LENS_FOCUS_DISTANCE,
4770                sizeof(focalDistance), &focalDistance);
4771    }
4772
4773    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
4774        uint8_t fwk_antibandingMode =
4775            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
4776        uint8_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
4777                     sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
4778                     fwk_antibandingMode);
4779        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
4780                sizeof(hal_antibandingMode), &hal_antibandingMode);
4781    }
4782
4783    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4784        int32_t expCompensation = frame_settings.find(
4785            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4786        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4787            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4788        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4789            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4790        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4791          sizeof(expCompensation), &expCompensation);
4792    }
4793
4794    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4795        int32_t expCompensation = frame_settings.find(
4796            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4797        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4798            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4799        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4800            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4801        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4802          sizeof(expCompensation), &expCompensation);
4803    }
4804
4805    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
4806        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
4807        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
4808                sizeof(aeLock), &aeLock);
4809    }
4810    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4811        cam_fps_range_t fps_range;
4812        fps_range.min_fps =
4813            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
4814        fps_range.max_fps =
4815            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4816        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
4817                sizeof(fps_range), &fps_range);
4818    }
4819
4820    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
4821        uint8_t awbLock =
4822            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
4823        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
4824                sizeof(awbLock), &awbLock);
4825    }
4826
4827    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
4828        uint8_t fwk_effectMode =
4829            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
4830        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
4831                sizeof(EFFECT_MODES_MAP),
4832                fwk_effectMode);
4833        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
4834                sizeof(effectMode), &effectMode);
4835    }
4836
4837    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
4838        uint8_t colorCorrectMode =
4839            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
4840        rc =
4841            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
4842                    sizeof(colorCorrectMode), &colorCorrectMode);
4843    }
4844
4845    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
4846        cam_color_correct_gains_t colorCorrectGains;
4847        for (int i = 0; i < 4; i++) {
4848            colorCorrectGains.gains[i] =
4849                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
4850        }
4851        rc =
4852            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
4853                    sizeof(colorCorrectGains), &colorCorrectGains);
4854    }
4855
4856    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
4857        cam_color_correct_matrix_t colorCorrectTransform;
4858        cam_rational_type_t transform_elem;
4859        int num = 0;
4860        for (int i = 0; i < 3; i++) {
4861           for (int j = 0; j < 3; j++) {
4862              transform_elem.numerator =
4863                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
4864              transform_elem.denominator =
4865                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
4866              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
4867              num++;
4868           }
4869        }
4870        rc =
4871            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
4872                    sizeof(colorCorrectTransform), &colorCorrectTransform);
4873    }
4874
4875    cam_trigger_t aecTrigger;
4876    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
4877    aecTrigger.trigger_id = -1;
4878    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
4879        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
4880        aecTrigger.trigger =
4881            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
4882        aecTrigger.trigger_id =
4883            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
4884        rc = AddSetMetaEntryToBatch(hal_metadata,
4885                CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
4886                sizeof(aecTrigger), &aecTrigger);
4887    }
4888    /*af_trigger must come with a trigger id*/
4889    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
4890        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
4891        cam_trigger_t af_trigger;
4892        af_trigger.trigger =
4893            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
4894        af_trigger.trigger_id =
4895            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
4896        rc = AddSetMetaEntryToBatch(hal_metadata,
4897                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
4898    }
4899
4900    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
4901        int32_t demosaic =
4902            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
4903        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
4904                sizeof(demosaic), &demosaic);
4905    }
4906
4907    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
4908        cam_edge_application_t edge_application;
4909        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4910        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
4911            edge_application.sharpness = 0;
4912        } else {
4913            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
4914                uint8_t edgeStrength =
4915                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
4916                edge_application.sharpness = (int32_t)edgeStrength;
4917            } else {
4918                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
4919            }
4920        }
4921        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
4922                sizeof(edge_application), &edge_application);
4923    }
4924
4925    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
4926        int32_t respectFlashMode = 1;
4927        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4928            uint8_t fwk_aeMode =
4929                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4930            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
4931                respectFlashMode = 0;
4932                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
4933                    __func__);
4934            }
4935        }
4936        if (respectFlashMode) {
4937            uint8_t flashMode =
4938                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
4939            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
4940                                          sizeof(FLASH_MODES_MAP),
4941                                          flashMode);
4942            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
4943            // To check: CAM_INTF_META_FLASH_MODE usage
4944            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4945                          sizeof(flashMode), &flashMode);
4946        }
4947    }
4948
4949    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
4950        uint8_t flashPower =
4951            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
4952        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
4953                sizeof(flashPower), &flashPower);
4954    }
4955
4956    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
4957        int64_t flashFiringTime =
4958            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
4959        rc = AddSetMetaEntryToBatch(hal_metadata,
4960                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
4961    }
4962
4963    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
4964        uint8_t hotPixelMode =
4965            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
4966        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
4967                sizeof(hotPixelMode), &hotPixelMode);
4968    }
4969
4970    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
4971        float lensAperture =
4972            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
4973        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
4974                sizeof(lensAperture), &lensAperture);
4975    }
4976
4977    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
4978        float filterDensity =
4979            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
4980        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
4981                sizeof(filterDensity), &filterDensity);
4982    }
4983
4984    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4985        float focalLength =
4986            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4987        rc = AddSetMetaEntryToBatch(hal_metadata,
4988                CAM_INTF_META_LENS_FOCAL_LENGTH,
4989                sizeof(focalLength), &focalLength);
4990    }
4991
4992    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
4993        uint8_t optStabMode =
4994            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
4995        rc = AddSetMetaEntryToBatch(hal_metadata,
4996                CAM_INTF_META_LENS_OPT_STAB_MODE,
4997                sizeof(optStabMode), &optStabMode);
4998    }
4999
5000    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5001        uint8_t noiseRedMode =
5002            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5003        rc = AddSetMetaEntryToBatch(hal_metadata,
5004                CAM_INTF_META_NOISE_REDUCTION_MODE,
5005                sizeof(noiseRedMode), &noiseRedMode);
5006    }
5007
5008    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
5009        uint8_t noiseRedStrength =
5010            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
5011        rc = AddSetMetaEntryToBatch(hal_metadata,
5012                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
5013                sizeof(noiseRedStrength), &noiseRedStrength);
5014    }
5015
5016    cam_crop_region_t scalerCropRegion;
5017    bool scalerCropSet = false;
5018    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
5019        scalerCropRegion.left =
5020            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
5021        scalerCropRegion.top =
5022            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
5023        scalerCropRegion.width =
5024            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
5025        scalerCropRegion.height =
5026            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
5027        rc = AddSetMetaEntryToBatch(hal_metadata,
5028                CAM_INTF_META_SCALER_CROP_REGION,
5029                sizeof(scalerCropRegion), &scalerCropRegion);
5030        scalerCropSet = true;
5031    }
5032
5033    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
5034        int64_t sensorExpTime =
5035            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
5036        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
5037        rc = AddSetMetaEntryToBatch(hal_metadata,
5038                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
5039                sizeof(sensorExpTime), &sensorExpTime);
5040    }
5041
5042    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
5043        int64_t sensorFrameDuration =
5044            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
5045        int64_t minFrameDuration = getMinFrameDuration(request);
5046        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
5047        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
5048            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
5049        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
5050        rc = AddSetMetaEntryToBatch(hal_metadata,
5051                CAM_INTF_META_SENSOR_FRAME_DURATION,
5052                sizeof(sensorFrameDuration), &sensorFrameDuration);
5053    }
5054
5055    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
5056        int32_t sensorSensitivity =
5057            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
5058        if (sensorSensitivity <
5059                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
5060            sensorSensitivity =
5061                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
5062        if (sensorSensitivity >
5063                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
5064            sensorSensitivity =
5065                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
5066        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
5067        rc = AddSetMetaEntryToBatch(hal_metadata,
5068                CAM_INTF_META_SENSOR_SENSITIVITY,
5069                sizeof(sensorSensitivity), &sensorSensitivity);
5070    }
5071
5072    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
5073        int32_t shadingMode =
5074            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
5075        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
5076                sizeof(shadingMode), &shadingMode);
5077    }
5078
5079    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
5080        uint8_t shadingStrength =
5081            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
5082        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
5083                sizeof(shadingStrength), &shadingStrength);
5084    }
5085
5086    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5087        uint8_t fwk_facedetectMode =
5088            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5089        uint8_t facedetectMode =
5090            lookupHalName(FACEDETECT_MODES_MAP,
5091                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
5092        rc = AddSetMetaEntryToBatch(hal_metadata,
5093                CAM_INTF_META_STATS_FACEDETECT_MODE,
5094                sizeof(facedetectMode), &facedetectMode);
5095    }
5096
5097    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
5098        uint8_t histogramMode =
5099            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
5100        rc = AddSetMetaEntryToBatch(hal_metadata,
5101                CAM_INTF_META_STATS_HISTOGRAM_MODE,
5102                sizeof(histogramMode), &histogramMode);
5103    }
5104
5105    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
5106        uint8_t sharpnessMapMode =
5107            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
5108        rc = AddSetMetaEntryToBatch(hal_metadata,
5109                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
5110                sizeof(sharpnessMapMode), &sharpnessMapMode);
5111    }
5112
5113    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
5114        uint8_t tonemapMode =
5115            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
5116        rc = AddSetMetaEntryToBatch(hal_metadata,
5117                CAM_INTF_META_TONEMAP_MODE,
5118                sizeof(tonemapMode), &tonemapMode);
5119    }
5120    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
5121    /*All tonemap channels will have the same number of points*/
5122    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
5123        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
5124        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
5125        cam_rgb_tonemap_curves tonemapCurves;
5126        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
5127
5128        /* ch0 = G*/
5129        int point = 0;
5130        cam_tonemap_curve_t tonemapCurveGreen;
5131        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
5132            for (int j = 0; j < 2; j++) {
5133               tonemapCurveGreen.tonemap_points[i][j] =
5134                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
5135               point++;
5136            }
5137        }
5138        tonemapCurves.curves[0] = tonemapCurveGreen;
5139
5140        /* ch 1 = B */
5141        point = 0;
5142        cam_tonemap_curve_t tonemapCurveBlue;
5143        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
5144            for (int j = 0; j < 2; j++) {
5145               tonemapCurveBlue.tonemap_points[i][j] =
5146                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
5147               point++;
5148            }
5149        }
5150        tonemapCurves.curves[1] = tonemapCurveBlue;
5151
5152        /* ch 2 = R */
5153        point = 0;
5154        cam_tonemap_curve_t tonemapCurveRed;
5155        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
5156            for (int j = 0; j < 2; j++) {
5157               tonemapCurveRed.tonemap_points[i][j] =
5158                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
5159               point++;
5160            }
5161        }
5162        tonemapCurves.curves[2] = tonemapCurveRed;
5163
5164        rc = AddSetMetaEntryToBatch(hal_metadata,
5165                CAM_INTF_META_TONEMAP_CURVES,
5166                sizeof(tonemapCurves), &tonemapCurves);
5167    }
5168
5169    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5170        uint8_t captureIntent =
5171            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5172        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
5173                sizeof(captureIntent), &captureIntent);
5174    }
5175
5176    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
5177        uint8_t blackLevelLock =
5178            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
5179        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
5180                sizeof(blackLevelLock), &blackLevelLock);
5181    }
5182
5183    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5184        uint8_t lensShadingMapMode =
5185            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5186        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5187                sizeof(lensShadingMapMode), &lensShadingMapMode);
5188    }
5189
5190    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
5191        cam_area_t roi;
5192        bool reset = true;
5193        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
5194        if (scalerCropSet) {
5195            reset = resetIfNeededROI(&roi, &scalerCropRegion);
5196        }
5197        if (reset) {
5198            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
5199                    sizeof(roi), &roi);
5200        }
5201    }
5202
5203    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
5204        cam_area_t roi;
5205        bool reset = true;
5206        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
5207        if (scalerCropSet) {
5208            reset = resetIfNeededROI(&roi, &scalerCropRegion);
5209        }
5210        if (reset) {
5211            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
5212                    sizeof(roi), &roi);
5213        }
5214    }
5215
5216    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
5217        cam_area_t roi;
5218        bool reset = true;
5219        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
5220        if (scalerCropSet) {
5221            reset = resetIfNeededROI(&roi, &scalerCropRegion);
5222        }
5223        if (reset) {
5224            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AWB_REGIONS,
5225                    sizeof(roi), &roi);
5226        }
5227    }
5228
5229    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
5230        cam_test_pattern_data_t testPatternData;
5231        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
5232        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
5233               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
5234
5235        memset(&testPatternData, 0, sizeof(testPatternData));
5236        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
5237        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
5238                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
5239            int32_t* fwk_testPatternData = frame_settings.find(
5240                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
5241            testPatternData.r = fwk_testPatternData[0];
5242            testPatternData.b = fwk_testPatternData[3];
5243            switch (gCamCapability[mCameraId]->color_arrangement) {
5244            case CAM_FILTER_ARRANGEMENT_RGGB:
5245            case CAM_FILTER_ARRANGEMENT_GRBG:
5246                testPatternData.gr = fwk_testPatternData[1];
5247                testPatternData.gb = fwk_testPatternData[2];
5248                break;
5249            case CAM_FILTER_ARRANGEMENT_GBRG:
5250            case CAM_FILTER_ARRANGEMENT_BGGR:
5251                testPatternData.gr = fwk_testPatternData[2];
5252                testPatternData.gb = fwk_testPatternData[1];
5253                break;
5254            default:
5255                ALOGE("%s: color arrangement %d is not supported", __func__,
5256                    gCamCapability[mCameraId]->color_arrangement);
5257                break;
5258            }
5259        }
5260        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
5261            sizeof(testPatternData), &testPatternData);
5262    }
5263
5264    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
5265        double *gps_coords =
5266            frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
5267        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
5268    }
5269
5270    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
5271        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
5272        const char *gps_methods_src = (const char *)
5273                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
5274        uint32_t count = frame_settings.find(
5275                ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
5276        memset(gps_methods, 0, sizeof(gps_methods));
5277        strncpy(gps_methods, gps_methods_src, sizeof(gps_methods));
5278        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
5279    }
5280
5281    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
5282        int64_t gps_timestamp =
5283            frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
5284        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
5285    }
5286
5287    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5288        int32_t orientation =
5289            frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5290        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
5291    }
5292
5293    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
5294        int8_t quality =
5295            frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
5296        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
5297    }
5298
5299    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
5300        int8_t thumb_quality =
5301            frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
5302        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
5303    }
5304
5305    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5306        cam_dimension_t dim;
5307        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5308        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5309        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
5310    }
5311
5312    // Internal metadata
5313    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
5314        uint8_t* privatedata =
5315            frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
5316        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
5317            sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
5318    }
5319
5320    // EV step
5321    rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
5322            sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
5323
5324    return rc;
5325}
5326
5327/*===========================================================================
5328 * FUNCTION   : captureResultCb
5329 *
5330 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
5331 *
5332 * PARAMETERS :
 *   @metadata : metadata super buffer from mm-camera-interface
5334 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
5335 *   @userdata: userdata
5336 *
5337 * RETURN     : NONE
5338 *==========================================================================*/
5339void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
5340                camera3_stream_buffer_t *buffer,
5341                uint32_t frame_number, void *userdata)
5342{
5343    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
5344    if (hw == NULL) {
5345        ALOGE("%s: Invalid hw %p", __func__, hw);
5346        return;
5347    }
5348
5349    hw->captureResultCb(metadata, buffer, frame_number);
5350    return;
5351}
5352
5353
5354/*===========================================================================
5355 * FUNCTION   : initialize
5356 *
5357 * DESCRIPTION: Pass framework callback pointers to HAL
5358 *
5359 * PARAMETERS :
5360 *
5361 *
5362 * RETURN     : Success : 0
5363 *              Failure: -ENODEV
5364 *==========================================================================*/
5365
5366int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
5367                                  const camera3_callback_ops_t *callback_ops)
5368{
5369    ALOGV("%s: E", __func__);
5370    QCamera3HardwareInterface *hw =
5371        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5372    if (!hw) {
5373        ALOGE("%s: NULL camera device", __func__);
5374        return -ENODEV;
5375    }
5376
5377    int rc = hw->initialize(callback_ops);
5378    ALOGV("%s: X", __func__);
5379    return rc;
5380}
5381
5382/*===========================================================================
5383 * FUNCTION   : configure_streams
5384 *
5385 * DESCRIPTION:
5386 *
5387 * PARAMETERS :
5388 *
5389 *
5390 * RETURN     : Success: 0
5391 *              Failure: -EINVAL (if stream configuration is invalid)
5392 *                       -ENODEV (fatal error)
5393 *==========================================================================*/
5394
5395int QCamera3HardwareInterface::configure_streams(
5396        const struct camera3_device *device,
5397        camera3_stream_configuration_t *stream_list)
5398{
5399    ALOGV("%s: E", __func__);
5400    QCamera3HardwareInterface *hw =
5401        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5402    if (!hw) {
5403        ALOGE("%s: NULL camera device", __func__);
5404        return -ENODEV;
5405    }
5406    int rc = hw->configureStreams(stream_list);
5407    ALOGV("%s: X", __func__);
5408    return rc;
5409}
5410
5411/*===========================================================================
5412 * FUNCTION   : register_stream_buffers
5413 *
5414 * DESCRIPTION: Register stream buffers with the device
5415 *
5416 * PARAMETERS :
5417 *
5418 * RETURN     :
5419 *==========================================================================*/
5420int QCamera3HardwareInterface::register_stream_buffers(
5421        const struct camera3_device *device,
5422        const camera3_stream_buffer_set_t *buffer_set)
5423{
5424    ALOGV("%s: E", __func__);
5425    QCamera3HardwareInterface *hw =
5426        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5427    if (!hw) {
5428        ALOGE("%s: NULL camera device", __func__);
5429        return -ENODEV;
5430    }
5431    int rc = hw->registerStreamBuffers(buffer_set);
5432    ALOGV("%s: X", __func__);
5433    return rc;
5434}
5435
5436/*===========================================================================
5437 * FUNCTION   : construct_default_request_settings
5438 *
5439 * DESCRIPTION: Configure a settings buffer to meet the required use case
5440 *
5441 * PARAMETERS :
5442 *
5443 *
5444 * RETURN     : Success: Return valid metadata
5445 *              Failure: Return NULL
5446 *==========================================================================*/
5447const camera_metadata_t* QCamera3HardwareInterface::
5448    construct_default_request_settings(const struct camera3_device *device,
5449                                        int type)
5450{
5451
5452    ALOGV("%s: E", __func__);
5453    camera_metadata_t* fwk_metadata = NULL;
5454    QCamera3HardwareInterface *hw =
5455        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5456    if (!hw) {
5457        ALOGE("%s: NULL camera device", __func__);
5458        return NULL;
5459    }
5460
5461    fwk_metadata = hw->translateCapabilityToMetadata(type);
5462
5463    ALOGV("%s: X", __func__);
5464    return fwk_metadata;
5465}
5466
5467/*===========================================================================
5468 * FUNCTION   : process_capture_request
5469 *
5470 * DESCRIPTION:
5471 *
5472 * PARAMETERS :
5473 *
5474 *
5475 * RETURN     :
5476 *==========================================================================*/
5477int QCamera3HardwareInterface::process_capture_request(
5478                    const struct camera3_device *device,
5479                    camera3_capture_request_t *request)
5480{
5481    ALOGV("%s: E", __func__);
5482    QCamera3HardwareInterface *hw =
5483        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5484    if (!hw) {
5485        ALOGE("%s: NULL camera device", __func__);
5486        return -EINVAL;
5487    }
5488
5489    int rc = hw->processCaptureRequest(request);
5490    ALOGV("%s: X", __func__);
5491    return rc;
5492}
5493
5494/*===========================================================================
5495 * FUNCTION   : dump
5496 *
5497 * DESCRIPTION:
5498 *
5499 * PARAMETERS :
5500 *
5501 *
5502 * RETURN     :
5503 *==========================================================================*/
5504
5505void QCamera3HardwareInterface::dump(
5506                const struct camera3_device *device, int fd)
5507{
5508    ALOGV("%s: E", __func__);
5509    QCamera3HardwareInterface *hw =
5510        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5511    if (!hw) {
5512        ALOGE("%s: NULL camera device", __func__);
5513        return;
5514    }
5515
5516    hw->dump(fd);
5517    ALOGV("%s: X", __func__);
5518    return;
5519}
5520
5521/*===========================================================================
5522 * FUNCTION   : flush
5523 *
5524 * DESCRIPTION:
5525 *
5526 * PARAMETERS :
5527 *
5528 *
5529 * RETURN     :
5530 *==========================================================================*/
5531
5532int QCamera3HardwareInterface::flush(
5533                const struct camera3_device *device)
5534{
5535    int rc;
5536    ALOGV("%s: E", __func__);
5537    QCamera3HardwareInterface *hw =
5538        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5539    if (!hw) {
5540        ALOGE("%s: NULL camera device", __func__);
5541        return -EINVAL;
5542    }
5543
5544    rc = hw->flush();
5545    ALOGV("%s: X", __func__);
5546    return rc;
5547}
5548
5549/*===========================================================================
5550 * FUNCTION   : close_camera_device
5551 *
5552 * DESCRIPTION:
5553 *
5554 * PARAMETERS :
5555 *
5556 *
5557 * RETURN     :
5558 *==========================================================================*/
5559int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
5560{
5561    ALOGV("%s: E", __func__);
5562    int ret = NO_ERROR;
5563    QCamera3HardwareInterface *hw =
5564        reinterpret_cast<QCamera3HardwareInterface *>(
5565            reinterpret_cast<camera3_device_t *>(device)->priv);
5566    if (!hw) {
5567        ALOGE("NULL camera device");
5568        return BAD_VALUE;
5569    }
5570    delete hw;
5571
5572    pthread_mutex_lock(&mCameraSessionLock);
5573    mCameraSessionActive = 0;
5574    pthread_mutex_unlock(&mCameraSessionLock);
5575    ALOGV("%s: X", __func__);
5576    return ret;
5577}
5578
5579/*===========================================================================
5580 * FUNCTION   : getWaveletDenoiseProcessPlate
5581 *
5582 * DESCRIPTION: query wavelet denoise process plate
5583 *
5584 * PARAMETERS : None
5585 *
 * RETURN     : WNR process plate value
5587 *==========================================================================*/
5588cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
5589{
5590    char prop[PROPERTY_VALUE_MAX];
5591    memset(prop, 0, sizeof(prop));
5592    property_get("persist.denoise.process.plates", prop, "0");
5593    int processPlate = atoi(prop);
5594    switch(processPlate) {
5595    case 0:
5596        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
5597    case 1:
5598        return CAM_WAVELET_DENOISE_CBCR_ONLY;
5599    case 2:
5600        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5601    case 3:
5602        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
5603    default:
5604        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5605    }
5606}
5607
5608/*===========================================================================
5609 * FUNCTION   : needRotationReprocess
5610 *
5611 * DESCRIPTION: if rotation needs to be done by reprocess in pp
5612 *
5613 * PARAMETERS : none
5614 *
5615 * RETURN     : true: needed
5616 *              false: no need
5617 *==========================================================================*/
5618bool QCamera3HardwareInterface::needRotationReprocess()
5619{
5620    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
5621        // current rotation is not zero, and pp has the capability to process rotation
5622        ALOGD("%s: need do reprocess for rotation", __func__);
5623        return true;
5624    }
5625
5626    return false;
5627}
5628
5629/*===========================================================================
5630 * FUNCTION   : needReprocess
5631 *
5632 * DESCRIPTION: if reprocess in needed
5633 *
5634 * PARAMETERS : none
5635 *
5636 * RETURN     : true: needed
5637 *              false: no need
5638 *==========================================================================*/
5639bool QCamera3HardwareInterface::needReprocess()
5640{
5641    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
5642        // TODO: add for ZSL HDR later
5643        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5644        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5645        return true;
5646    }
5647    return needRotationReprocess();
5648}
5649
5650/*===========================================================================
5651 * FUNCTION   : addOfflineReprocChannel
5652 *
5653 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
5654 *              coming from input channel
5655 *
5656 * PARAMETERS :
5657 *   @pInputChannel : ptr to input channel whose frames will be post-processed
5658 *
5659 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
5660 *==========================================================================*/
5661QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
5662              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
5663{
5664    int32_t rc = NO_ERROR;
5665    QCamera3ReprocessChannel *pChannel = NULL;
5666    if (pInputChannel == NULL) {
5667        ALOGE("%s: input channel obj is NULL", __func__);
5668        return NULL;
5669    }
5670
5671    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
5672            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
5673    if (NULL == pChannel) {
5674        ALOGE("%s: no mem for reprocess channel", __func__);
5675        return NULL;
5676    }
5677
5678    rc = pChannel->initialize();
5679    if (rc != NO_ERROR) {
5680        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
5681        delete pChannel;
5682        return NULL;
5683    }
5684
5685    // pp feature config
5686    cam_pp_feature_config_t pp_config;
5687    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
5688
5689    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
5690        cam_edge_application_t *edge = (cam_edge_application_t *)
5691                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
5692        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
5693            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
5694            pp_config.sharpness = edge->sharpness;
5695        }
5696    }
5697
5698    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
5699        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
5700                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
5701        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
5702            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
5703            pp_config.denoise2d.denoise_enable = 1;
5704            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
5705        }
5706    }
5707
5708    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
5709        int32_t *rotation = (int32_t *)POINTER_OF(
5710                CAM_INTF_META_JPEG_ORIENTATION, metadata);
5711
5712        if (needRotationReprocess()) {
5713            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
5714            if (*rotation == 0) {
5715                pp_config.rotation = ROTATE_0;
5716            } else if (*rotation == 90) {
5717                pp_config.rotation = ROTATE_90;
5718            } else if (*rotation == 180) {
5719                pp_config.rotation = ROTATE_180;
5720            } else if (*rotation == 270) {
5721                pp_config.rotation = ROTATE_270;
5722            }
5723        }
5724    }
5725
5726    rc = pChannel->addReprocStreamsFromSource(pp_config,
5727                                             pInputChannel,
5728                                             mMetadataChannel);
5729
5730    if (rc != NO_ERROR) {
5731        delete pChannel;
5732        return NULL;
5733    }
5734    return pChannel;
5735}
5736
5737}; //end namespace qcamera
5738