QCamera3HWI.cpp revision 9bb9e05544fc46557da111db2cc6dbb017feee76
/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34#include <cutils/properties.h>
35#include <hardware/camera3.h>
36#include <camera/CameraMetadata.h>
37#include <stdlib.h>
38#include <fcntl.h>
39#include <stdint.h>
40#include <utils/Log.h>
41#include <utils/Errors.h>
42#include <ui/Fence.h>
43#include <gralloc_priv.h>
44#include "QCamera3HWI.h"
45#include "QCamera3Mem.h"
46#include "QCamera3Channel.h"
47#include "QCamera3PostProc.h"
48#include "QCamera3VendorTags.h"
49
50using namespace android;
51
52namespace qcamera {
53
// Convenience max() macro (arguments are evaluated twice; keep side-effect free).
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for the CPU-mapped pointer of buffer INDEX inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline depth reported when no request history exists yet
// (presumably frames of delay before first result — confirm against usage).
#define EMPTY_PIPELINE_DELAY 2

// Per-sensor capability records and the derived static metadata, shared by
// all HAL instances; indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a time
// (see openCamera(hw_device)).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
66
// Maps ANDROID_CONTROL_EFFECT_MODE_* framework values to mm-camera
// CAM_EFFECT_MODE_* values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
78
// Maps ANDROID_CONTROL_AWB_MODE_* framework values to mm-camera
// CAM_WB_MODE_* values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
90
// Maps ANDROID_CONTROL_SCENE_MODE_* framework values to mm-camera
// CAM_SCENE_MODE_* values. Note FACE_PRIORITY maps to CAM_SCENE_MODE_OFF:
// there is no backend scene mode for it (face priority is presumably handled
// via face detection instead — confirm against request translation code).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
109
// Maps ANDROID_CONTROL_AF_MODE_* framework values to mm-camera
// CAM_FOCUS_MODE_* values. Both CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED
// are listed against ANDROID_CONTROL_AF_MODE_OFF so that either backend mode
// translates back to AF_MODE_OFF; entry order matters for the HAL-to-Android
// direction (first match wins — see the note above REFERENCE_ILLUMINANT_MAP).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
119
// Maps ANDROID_CONTROL_AE_ANTIBANDING_MODE_* framework values to mm-camera
// CAM_ANTIBANDING_MODE_* values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
126
// Maps ANDROID_CONTROL_AE_MODE_* values to the mm-camera flash behavior each
// AE mode implies. AE_MODE_OFF and AE_MODE_ON both disable the flash; the
// REDEYE variant reuses CAM_FLASH_MODE_AUTO (no dedicated backend red-eye
// flash mode in this table).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
134
// Maps ANDROID_FLASH_MODE_* framework values to mm-camera CAM_FLASH_MODE_*
// values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
140
// Maps ANDROID_STATISTICS_FACE_DETECT_MODE_* framework values to mm-camera
// CAM_FACE_DETECT_MODE_* values (SIMPLE mode is not listed here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
145
// Maps ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* framework values to
// mm-camera CAM_FOCUS_* calibration-quality values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
154
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// {0, 0} entry means "no thumbnail", as required by
// ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
162
// Maps ANDROID_SENSOR_TEST_PATTERN_MODE_* framework values to mm-camera
// CAM_TEST_PATTERN_* values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
170
/* Not every Android reference-illuminant enum has a HAL counterpart, so some
 * Android values are omitted. The order of entries matters: when mapping from
 * HAL to Android the lookup walks from lower to higher index, so for HAL
 * values that map to several Android values the first entry listed wins.
 */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
194
195/* Custom tag definitions */
196
/* Dispatch table of camera3_device_ops entry points exposed to the camera
 * framework through mCameraDevice.ops (GNU designated-initializer syntax).
 * register_stream_buffers and get_metadata_vendor_tag_ops are intentionally
 * NULL: this device reports CAMERA_DEVICE_API_VERSION_3_2 (see constructor),
 * where these hooks are presumably unused — confirm against camera3.h. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
208
// By name, the cap on concurrently in-flight capture requests —
// NOTE(review): confirm against process_capture_request usage.
int QCamera3HardwareInterface::kMaxInFlight = 5;
210
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks, stored for later notifications
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
                        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mLoopBackResult(NULL),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false),
      mMetaFrameCount(0),
      mCallbacks(callbacks)
{
    // Fill in the hw_device_t header the framework uses to talk to us;
    // this HAL advertises the camera device API version 3.2.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Per-template default request settings start empty; freed in destructor.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power module is optional: used later for power hints; failure to load
    // it is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
269
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework

    // Pass 1: stop every channel before deleting any of them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    // Pass 2: delete the channels and free the stream_info records
    // (which were malloc'ed in configureStreams).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    // mPictureChannel aliases a channel deleted via mStreamInfo above;
    // just drop the pointer.
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();

    // Release any default request templates that were built.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
335
336/*===========================================================================
337 * FUNCTION   : openCamera
338 *
339 * DESCRIPTION: open camera
340 *
341 * PARAMETERS :
342 *   @hw_device  : double ptr for camera device struct
343 *
344 * RETURN     : int32_t type of status
345 *              NO_ERROR  -- success
346 *              none-zero failure code
347 *==========================================================================*/
348int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
349{
350    int rc = 0;
351    pthread_mutex_lock(&mCameraSessionLock);
352    if (mCameraSessionActive) {
353        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
354        pthread_mutex_unlock(&mCameraSessionLock);
355        return -EUSERS;
356    }
357
358    if (mCameraOpened) {
359        *hw_device = NULL;
360        return PERMISSION_DENIED;
361    }
362
363    rc = openCamera();
364    if (rc == 0) {
365        *hw_device = &mCameraDevice.common;
366        mCameraSessionActive = 1;
367    } else
368        *hw_device = NULL;
369
370#ifdef HAS_MULTIMEDIA_HINTS
371    if (rc == 0) {
372        if (m_pPowerModule) {
373            if (m_pPowerModule->powerHint) {
374                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
375                        (void *)"state=1");
376            }
377        }
378    }
379#endif
380    pthread_mutex_unlock(&mCameraSessionLock);
381    return rc;
382}
383
384/*===========================================================================
385 * FUNCTION   : openCamera
386 *
387 * DESCRIPTION: open camera
388 *
389 * PARAMETERS : none
390 *
391 * RETURN     : int32_t type of status
392 *              NO_ERROR  -- success
393 *              none-zero failure code
394 *==========================================================================*/
395int QCamera3HardwareInterface::openCamera()
396{
397    if (mCameraHandle) {
398        ALOGE("Failure: Camera already opened");
399        return ALREADY_EXISTS;
400    }
401    mCameraHandle = camera_open(mCameraId);
402    if (!mCameraHandle) {
403        ALOGE("camera_open failed.");
404        return UNKNOWN_ERROR;
405    }
406
407    mCameraOpened = true;
408
409    return NO_ERROR;
410}
411
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    // NOTE(review): mCameraHandle is dereferenced unchecked; callers only
    // invoke this after a successful openCamera() (mCameraOpened == true).
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                // Withdraw the hint raised when a YCbCr_420_888 stream was
                // configured ("state=2" / mHdrHint) before ending the session.
                if(mHdrHint == true) {
                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                            (void *)"state=3");
                    mHdrHint = false;
                }
                // Tell the power HAL the camera session has ended.
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}
449
450/*===========================================================================
451 * FUNCTION   : initialize
452 *
453 * DESCRIPTION: Initialize frameworks callback functions
454 *
455 * PARAMETERS :
456 *   @callback_ops : callback function to frameworks
457 *
458 * RETURN     :
459 *
460 *==========================================================================*/
461int QCamera3HardwareInterface::initialize(
462        const struct camera3_callback_ops *callback_ops)
463{
464    int rc;
465
466    pthread_mutex_lock(&mMutex);
467
468    rc = initParameters();
469    if (rc < 0) {
470        ALOGE("%s: initParamters failed %d", __func__, rc);
471       goto err1;
472    }
473    mCallbackOps = callback_ops;
474
475    pthread_mutex_unlock(&mMutex);
476    mCameraInitialized = true;
477    return 0;
478
479err1:
480    pthread_mutex_unlock(&mMutex);
481    return rc;
482}
483
484/*===========================================================================
485 * FUNCTION   : configureStreams
486 *
487 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
488 *              and output streams.
489 *
490 * PARAMETERS :
491 *   @stream_list : streams to be configured
492 *
493 * RETURN     :
494 *
495 *==========================================================================*/
496int QCamera3HardwareInterface::configureStreams(
497        camera3_stream_configuration_t *streamList)
498{
499    int rc = 0;
500
501    // Sanity check stream_list
502    if (streamList == NULL) {
503        ALOGE("%s: NULL stream configuration", __func__);
504        return BAD_VALUE;
505    }
506    if (streamList->streams == NULL) {
507        ALOGE("%s: NULL stream list", __func__);
508        return BAD_VALUE;
509    }
510
511    if (streamList->num_streams < 1) {
512        ALOGE("%s: Bad number of streams requested: %d", __func__,
513                streamList->num_streams);
514        return BAD_VALUE;
515    }
516
517    /* first invalidate all the steams in the mStreamList
518     * if they appear again, they will be validated */
519    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
520            it != mStreamInfo.end(); it++) {
521        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
522        channel->stop();
523        (*it)->status = INVALID;
524    }
525    if (mMetadataChannel) {
526        /* If content of mStreamInfo is not 0, there is metadata stream */
527        mMetadataChannel->stop();
528    }
529
530#ifdef HAS_MULTIMEDIA_HINTS
531    if(mHdrHint == true) {
532        if (m_pPowerModule) {
533            if (m_pPowerModule->powerHint) {
534                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
535                        (void *)"state=3");
536                mHdrHint = false;
537            }
538        }
539    }
540#endif
541
542    pthread_mutex_lock(&mMutex);
543
544    bool isZsl = false;
545    camera3_stream_t *inputStream = NULL;
546    camera3_stream_t *jpegStream = NULL;
547    cam_stream_size_info_t stream_config_info;
548
549    for (size_t i = 0; i < streamList->num_streams; i++) {
550        camera3_stream_t *newStream = streamList->streams[i];
551        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
552                __func__, newStream->stream_type, newStream->format,
553                 newStream->width, newStream->height);
554        //if the stream is in the mStreamList validate it
555        bool stream_exists = false;
556        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
557                it != mStreamInfo.end(); it++) {
558            if ((*it)->stream == newStream) {
559                QCamera3Channel *channel =
560                    (QCamera3Channel*)(*it)->stream->priv;
561                stream_exists = true;
562                delete channel;
563                (*it)->status = VALID;
564                (*it)->stream->priv = NULL;
565                (*it)->channel = NULL;
566            }
567        }
568        if (!stream_exists) {
569            //new stream
570            stream_info_t* stream_info;
571            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
572            stream_info->stream = newStream;
573            stream_info->status = VALID;
574            stream_info->channel = NULL;
575            mStreamInfo.push_back(stream_info);
576        }
577        if (newStream->stream_type == CAMERA3_STREAM_INPUT
578                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
579            if (inputStream != NULL) {
580                ALOGE("%s: Multiple input streams requested!", __func__);
581                pthread_mutex_unlock(&mMutex);
582                return BAD_VALUE;
583            }
584            inputStream = newStream;
585        }
586        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
587            jpegStream = newStream;
588        }
589    }
590    mInputStream = inputStream;
591
592    cleanAndSortStreamInfo();
593    if (mMetadataChannel) {
594        delete mMetadataChannel;
595        mMetadataChannel = NULL;
596    }
597
598    //Create metadata channel and initialize it
599    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
600                    mCameraHandle->ops, captureResultCb,
601                    &gCamCapability[mCameraId]->padding_info, this);
602    if (mMetadataChannel == NULL) {
603        ALOGE("%s: failed to allocate metadata channel", __func__);
604        rc = -ENOMEM;
605        pthread_mutex_unlock(&mMutex);
606        return rc;
607    }
608    rc = mMetadataChannel->initialize();
609    if (rc < 0) {
610        ALOGE("%s: metadata channel initialization failed", __func__);
611        delete mMetadataChannel;
612        mMetadataChannel = NULL;
613        pthread_mutex_unlock(&mMutex);
614        return rc;
615    }
616
617    /* Create dummy stream if there is one single raw stream */
618    if (streamList->num_streams == 1 &&
619            (streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
620            streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16)) {
621        mSupportChannel = new QCamera3SupportChannel(
622                mCameraHandle->camera_handle,
623                mCameraHandle->ops,
624                &gCamCapability[mCameraId]->padding_info,
625                this);
626        if (!mSupportChannel) {
627            ALOGE("%s: dummy channel cannot be created", __func__);
628            pthread_mutex_unlock(&mMutex);
629            return -ENOMEM;
630        }
631
632        rc = mSupportChannel->initialize();
633        if (rc < 0) {
634            ALOGE("%s: dummy channel initialization failed", __func__);
635            delete mSupportChannel;
636            mSupportChannel = NULL;
637            delete mMetadataChannel;
638            mMetadataChannel = NULL;
639            pthread_mutex_unlock(&mMutex);
640            return rc;
641        }
642    }
643
644    /* Allocate channel objects for the requested streams */
645    for (size_t i = 0; i < streamList->num_streams; i++) {
646        camera3_stream_t *newStream = streamList->streams[i];
647        uint32_t stream_usage = newStream->usage;
648        stream_config_info.stream_sizes[i].width = newStream->width;
649        stream_config_info.stream_sizes[i].height = newStream->height;
650        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
651            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
652            //for zsl stream the size is active array size
653            isZsl = true;
654            stream_config_info.stream_sizes[i].width =
655                    gCamCapability[mCameraId]->active_array_size.width;
656            stream_config_info.stream_sizes[i].height =
657                    gCamCapability[mCameraId]->active_array_size.height;
658            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
659        } else {
660           //for non zsl streams find out the format
661           switch (newStream->format) {
662           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
663              {
664                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
665                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
666                 } else {
667                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
668                 }
669              }
670              break;
671           case HAL_PIXEL_FORMAT_YCbCr_420_888:
672              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
673#ifdef HAS_MULTIMEDIA_HINTS
674              if (m_pPowerModule) {
675                  if (m_pPowerModule->powerHint) {
676                      m_pPowerModule->powerHint(m_pPowerModule,
677                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
678                      mHdrHint = true;
679                  }
680              }
681#endif
682              break;
683           case HAL_PIXEL_FORMAT_BLOB:
684              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
685              break;
686           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
687           case HAL_PIXEL_FORMAT_RAW16:
688              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
689              break;
690           default:
691              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
692              break;
693           }
694        }
695        if (newStream->priv == NULL) {
696            //New stream, construct channel
697            switch (newStream->stream_type) {
698            case CAMERA3_STREAM_INPUT:
699                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
700                break;
701            case CAMERA3_STREAM_BIDIRECTIONAL:
702                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
703                    GRALLOC_USAGE_HW_CAMERA_WRITE;
704                break;
705            case CAMERA3_STREAM_OUTPUT:
706                /* For video encoding stream, set read/write rarely
707                 * flag so that they may be set to un-cached */
708                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
709                    newStream->usage =
710                         (GRALLOC_USAGE_SW_READ_RARELY |
711                         GRALLOC_USAGE_SW_WRITE_RARELY |
712                         GRALLOC_USAGE_HW_CAMERA_WRITE);
713                else
714                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
715                break;
716            default:
717                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
718                break;
719            }
720
721            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
722                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
723                QCamera3Channel *channel = NULL;
724                switch (newStream->format) {
725                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
726                case HAL_PIXEL_FORMAT_YCbCr_420_888:
727                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
728                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
729                            mCameraHandle->ops, captureResultCb,
730                            &gCamCapability[mCameraId]->padding_info,
731                            this,
732                            newStream,
733                            (cam_stream_type_t) stream_config_info.type[i]);
734                    if (channel == NULL) {
735                        ALOGE("%s: allocation of channel failed", __func__);
736                        pthread_mutex_unlock(&mMutex);
737                        return -ENOMEM;
738                    }
739
740                    newStream->priv = channel;
741                    break;
742                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
743                case HAL_PIXEL_FORMAT_RAW16:
744                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
745                    mRawChannel = new QCamera3RawChannel(
746                            mCameraHandle->camera_handle,
747                            mCameraHandle->ops, captureResultCb,
748                            &gCamCapability[mCameraId]->padding_info,
749                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
750                    if (mRawChannel == NULL) {
751                        ALOGE("%s: allocation of raw channel failed", __func__);
752                        pthread_mutex_unlock(&mMutex);
753                        return -ENOMEM;
754                    }
755
756                    newStream->priv = (QCamera3Channel*)mRawChannel;
757                    break;
758                case HAL_PIXEL_FORMAT_BLOB:
759                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
760                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
761                            mCameraHandle->ops, captureResultCb,
762                            &gCamCapability[mCameraId]->padding_info, this, newStream);
763                    if (mPictureChannel == NULL) {
764                        ALOGE("%s: allocation of channel failed", __func__);
765                        pthread_mutex_unlock(&mMutex);
766                        return -ENOMEM;
767                    }
768                    newStream->priv = (QCamera3Channel*)mPictureChannel;
769                    break;
770
771                default:
772                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
773                    break;
774                }
775            }
776
777            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
778                    it != mStreamInfo.end(); it++) {
779                if ((*it)->stream == newStream) {
780                    (*it)->channel = (QCamera3Channel*) newStream->priv;
781                    break;
782                }
783            }
784        } else {
785            // Channel already exists for this stream
786            // Do nothing for now
787        }
788    }
789
790    if (isZsl)
791        mPictureChannel->overrideYuvSize(
792                gCamCapability[mCameraId]->active_array_size.width,
793                gCamCapability[mCameraId]->active_array_size.height);
794
795    int32_t hal_version = CAM_HAL_V3;
796    stream_config_info.num_streams = streamList->num_streams;
797    if (mSupportChannel) {
798        stream_config_info.stream_sizes[stream_config_info.num_streams] =
799                QCamera3SupportChannel::kDim;
800        stream_config_info.type[stream_config_info.num_streams] =
801                CAM_STREAM_TYPE_CALLBACK;
802        stream_config_info.num_streams++;
803    }
804
805    // settings/parameters don't carry over for new configureStreams
806    memset(mParameters, 0, sizeof(metadata_buffer_t));
807
808    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
809    AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
810                sizeof(hal_version), &hal_version);
811
812    AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
813                sizeof(stream_config_info), &stream_config_info);
814
815    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
816
817    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
818    mPendingRequestsList.clear();
819    mPendingFrameDropList.clear();
820    // Initialize/Reset the pending buffers list
821    mPendingBuffersMap.num_buffers = 0;
822    mPendingBuffersMap.mPendingBufferList.clear();
823
824    mFirstRequest = true;
825
826    //Get min frame duration for this streams configuration
827    deriveMinFrameDuration();
828
829    pthread_mutex_unlock(&mMutex);
830    return rc;
831}
832
833/*===========================================================================
834 * FUNCTION   : validateCaptureRequest
835 *
836 * DESCRIPTION: validate a capture request from camera service
837 *
838 * PARAMETERS :
839 *   @request : request from framework to process
840 *
841 * RETURN     :
842 *
843 *==========================================================================*/
844int QCamera3HardwareInterface::validateCaptureRequest(
845                    camera3_capture_request_t *request)
846{
847    ssize_t idx = 0;
848    const camera3_stream_buffer_t *b;
849    CameraMetadata meta;
850
851    /* Sanity check the request */
852    if (request == NULL) {
853        ALOGE("%s: NULL capture request", __func__);
854        return BAD_VALUE;
855    }
856
857    if (request->settings == NULL && mFirstRequest) {
858        /*settings cannot be null for the first request*/
859        return BAD_VALUE;
860    }
861
862    uint32_t frameNumber = request->frame_number;
863    if (request->input_buffer != NULL &&
864            request->input_buffer->stream != mInputStream) {
865        ALOGE("%s: Request %d: Input buffer not from input stream!",
866                __FUNCTION__, frameNumber);
867        return BAD_VALUE;
868    }
869    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
870        ALOGE("%s: Request %d: No output buffers provided!",
871                __FUNCTION__, frameNumber);
872        return BAD_VALUE;
873    }
874    if (request->input_buffer != NULL) {
875        b = request->input_buffer;
876        QCamera3Channel *channel =
877            static_cast<QCamera3Channel*>(b->stream->priv);
878        if (channel == NULL) {
879            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
880                    __func__, frameNumber, idx);
881            return BAD_VALUE;
882        }
883        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
884            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
885                    __func__, frameNumber, idx);
886            return BAD_VALUE;
887        }
888        if (b->release_fence != -1) {
889            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
890                    __func__, frameNumber, idx);
891            return BAD_VALUE;
892        }
893        if (b->buffer == NULL) {
894            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
895                    __func__, frameNumber, idx);
896            return BAD_VALUE;
897        }
898    }
899
900    // Validate all buffers
901    b = request->output_buffers;
902    do {
903        QCamera3Channel *channel =
904                static_cast<QCamera3Channel*>(b->stream->priv);
905        if (channel == NULL) {
906            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
907                    __func__, frameNumber, idx);
908            return BAD_VALUE;
909        }
910        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
911            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
912                    __func__, frameNumber, idx);
913            return BAD_VALUE;
914        }
915        if (b->release_fence != -1) {
916            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
917                    __func__, frameNumber, idx);
918            return BAD_VALUE;
919        }
920        if (b->buffer == NULL) {
921            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
922                    __func__, frameNumber, idx);
923            return BAD_VALUE;
924        }
925        idx++;
926        b = request->output_buffers + idx;
927    } while (idx < (ssize_t)request->num_output_buffers);
928
929    return NO_ERROR;
930}
931
932/*===========================================================================
933 * FUNCTION   : deriveMinFrameDuration
934 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
936 *              on currently configured streams.
937 *
938 * PARAMETERS : NONE
939 *
940 * RETURN     : NONE
941 *
942 *==========================================================================*/
943void QCamera3HardwareInterface::deriveMinFrameDuration()
944{
945    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
946
947    maxJpegDim = 0;
948    maxProcessedDim = 0;
949    maxRawDim = 0;
950
951    // Figure out maximum jpeg, processed, and raw dimensions
952    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
953        it != mStreamInfo.end(); it++) {
954
955        // Input stream doesn't have valid stream_type
956        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
957            continue;
958
959        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
960        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
961            if (dimension > maxJpegDim)
962                maxJpegDim = dimension;
963        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
964                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
965            if (dimension > maxRawDim)
966                maxRawDim = dimension;
967        } else {
968            if (dimension > maxProcessedDim)
969                maxProcessedDim = dimension;
970        }
971    }
972
973    //Assume all jpeg dimensions are in processed dimensions.
974    if (maxJpegDim > maxProcessedDim)
975        maxProcessedDim = maxJpegDim;
976    //Find the smallest raw dimension that is greater or equal to jpeg dimension
977    if (maxProcessedDim > maxRawDim) {
978        maxRawDim = INT32_MAX;
979        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
980            i++) {
981
982            int32_t dimension =
983                gCamCapability[mCameraId]->raw_dim[i].width *
984                gCamCapability[mCameraId]->raw_dim[i].height;
985
986            if (dimension >= maxProcessedDim && dimension < maxRawDim)
987                maxRawDim = dimension;
988        }
989    }
990
991    //Find minimum durations for processed, jpeg, and raw
992    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
993            i++) {
994        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
995                gCamCapability[mCameraId]->raw_dim[i].height) {
996            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
997            break;
998        }
999    }
1000    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1001        if (maxProcessedDim ==
1002            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1003            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1004            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1005            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1006            break;
1007        }
1008    }
1009}
1010
1011/*===========================================================================
1012 * FUNCTION   : getMinFrameDuration
1013 *
 * DESCRIPTION: get the minimum frame duration based on the stored minimum
 *              frame durations and the current request configuration.
1016 *
 * PARAMETERS : @request: request sent by the frameworks
1018 *
 * RETURN     : min frame duration for a particular request
1020 *
1021 *==========================================================================*/
1022int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1023{
1024    bool hasJpegStream = false;
1025    bool hasRawStream = false;
1026    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1027        const camera3_stream_t *stream = request->output_buffers[i].stream;
1028        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1029            hasJpegStream = true;
1030        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1031                stream->format == HAL_PIXEL_FORMAT_RAW16)
1032            hasRawStream = true;
1033    }
1034
1035    if (!hasJpegStream)
1036        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1037    else
1038        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1039}
1040
1041/*===========================================================================
1042 * FUNCTION   : handleMetadataWithLock
1043 *
1044 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1045 *
1046 * PARAMETERS : @metadata_buf: metadata buffer
1047 *
1048 * RETURN     :
1049 *
1050 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf)
{
    // Decode the backend metadata payload. All fields below are read out of
    // the raw buffer via POINTER_OF offsets.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
        CAM_INTF_META_PENDING_REQUESTS, metadata);
    uint32_t frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
    const struct timeval *tv = (const struct timeval *)
        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    // Sensor timestamp converted from timeval to nanoseconds for the
    // framework's shutter notification.
    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
        tv->tv_usec * NSEC_PER_USEC;
    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);

    int32_t urgent_frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t urgent_frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if (urgent_frame_number_valid) {
        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using HAL3.1 quirk for partial results
        for (List<PendingRequestInfo>::iterator i =
            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Older pending requests that were never notified get an
            // estimated shutter timestamp, extrapolated backwards at
            // 33ms per frame from the urgent frame's capture time.
            if (i->frame_number < urgent_frame_number &&
                i->bNotified == 0) {
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                i->timestamp = notify_msg.message.shutter.timestamp;
                i->bNotified = 1;
                ALOGV("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            }

            if (i->frame_number == urgent_frame_number) {

                camera3_capture_result_t result;

                // Send shutter notify to frameworks
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time;
                mCallbackOps->notify(mCallbackOps, &notify_msg);

                i->timestamp = capture_time;
                i->bNotified = 1;

                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                // Partial result metadata is owned here; release it after
                // the framework callback returns.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
        // Return the metadata buffer to the channel and free the wrapper;
        // there is no per-request result to deliver.
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        goto done_metadata;
    }
    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Go through the pending requests info and send shutter/results to frameworks
    // All entries with frame numbers <= this metadata's frame number are
    // flushed out (and erased at the bottom of the loop body).
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
        // buffer with CAMERA3_BUFFER_STATUS_ERROR
        if (cam_frame_drop.frame_dropped) {
            camera3_notify_msg_t notify_msg;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      notify_msg.type = CAMERA3_MSG_ERROR;
                      notify_msg.message.error.frame_number = i->frame_number;
                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                      notify_msg.message.error.error_stream = j->stream;
                      mCallbackOps->notify(mCallbackOps, &notify_msg);
                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
                             __func__, i->frame_number, streamID);
                      PendingFrameDropInfo PendingFrameDrop;
                      PendingFrameDrop.frame_number=i->frame_number;
                      PendingFrameDrop.stream_ID = streamID;
                      // Add the Frame drop info to mPendingFrameDropList
                      // so handleBufferWithLock / the buffer flush below can
                      // mark the matching buffer CAMERA3_BUFFER_STATUS_ERROR.
                      mPendingFrameDropList.push_back(PendingFrameDrop);
                  }
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            // This request's metadata never arrived; synthesize a minimal
            // result carrying only the timestamp and request id.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }

                //If it is a blob request then send the metadata to the picture channel
                // The picture channel takes ownership of reproc_meta.
                metadata_buffer_t *reproc_meta =
                        (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
                if (reproc_meta == NULL) {
                    ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
                    // NOTE(review): this error path jumps out before the
                    // bufDone()/free() below, so metadata_buf is neither
                    // returned to the channel nor freed — possible leak;
                    // confirm intended behavior.
                    goto done_metadata;
                }
                *reproc_meta = *metadata;
                mPictureChannel->queueReprocMetadata(reproc_meta);
            }
            // Return metadata buffer
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the output buffers already cached for this request by
        // handleBufferWithLock; they are delivered together with the metadata.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // If this buffer's stream/frame was flagged dropped above,
                    // mark the buffer with STATUS_ERROR before delivery.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the pending-buffers bookkeeping
                    // now that it is being handed back to the framework.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        ALOGV("%s: Found buffer %p in pending buffer List "
                              "for frame %d, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    // j->buffer was malloc'd by handleBufferWithLock when it
                    // cached the buffer; copy it out and free the cache entry.
                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // Metadata-only result: no buffers have been cached yet for this
            // request; they will be delivered later by handleBufferWithLock.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = mPendingRequestsList.erase(i);
    }

done_metadata:
    // Every request still pending has advanced one more stage through the
    // pipeline; bump its reported pipeline depth.
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    // If the backend has no more requests queued, let a blocked
    // process_capture_request() proceed.
    if (!pending_requests)
        unblockRequestIfNecessary();

}
1299
1300/*===========================================================================
1301 * FUNCTION   : handleBufferWithLock
1302 *
1303 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1304 *
1305 * PARAMETERS : @buffer: image buffer for the callback
1306 *              @frame_number: frame number of the image buffer
1307 *
1308 * RETURN     :
1309 *
1310 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // The request's metadata was already delivered (entry erased by
        // handleMetadataWithLock); send this buffer out on its own.

        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // If this stream/frame was flagged as dropped, mark the buffer with
        // STATUS_ERROR and clear the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Buffer is leaving the HAL; drop it from the pending-buffers map.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                ALOGV("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer_present) {
            // Reprocess request: deliver the buffer immediately and retire
            // the pending request entry.
            camera3_capture_result result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            i = mPendingRequestsList.erase(i);
            mPendingRequest--;
        } else {
            // Metadata for this frame hasn't arrived yet: cache a copy of the
            // buffer on the request entry. handleMetadataWithLock will free
            // this malloc'd copy when it delivers the combined result.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
1394
1395/*===========================================================================
1396 * FUNCTION   : unblockRequestIfNecessary
1397 *
1398 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1399 *              that mMutex is held when this function is called.
1400 *
1401 * PARAMETERS :
1402 *
1403 * RETURN     :
1404 *
1405 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Wake a thread blocked in process_capture_request waiting on
   // mRequestCond. Per this function's contract (see header comment),
   // the caller already holds mMutex when signalling.
   pthread_cond_signal(&mRequestCond);
}
1411
1412/*===========================================================================
1413 * FUNCTION   : registerStreamBuffers
1414 *
1415 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1416 *
1417 * PARAMETERS :
1418 *   @stream_list : streams to be configured
1419 *
1420 * RETURN     :
1421 *
1422 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t * /*buffer_set*/)
{
    // Deprecated: up-front buffer registration is no longer used. Output
    // buffers are registered with their channel on the first capture
    // request instead (see processCaptureRequest).
    return NO_ERROR;
}
1429
1430/*===========================================================================
1431 * FUNCTION   : processCaptureRequest
1432 *
1433 * DESCRIPTION: process a capture request from camera service
1434 *
1435 * PARAMETERS :
1436 *   @request : request from framework to process
1437 *
1438 * RETURN     :
1439 *
1440 *==========================================================================*/
1441int QCamera3HardwareInterface::processCaptureRequest(
1442                    camera3_capture_request_t *request)
1443{
1444    int rc = NO_ERROR;
1445    int32_t request_id;
1446    CameraMetadata meta;
1447
1448    pthread_mutex_lock(&mMutex);
1449
1450    rc = validateCaptureRequest(request);
1451    if (rc != NO_ERROR) {
1452        ALOGE("%s: incoming request is not valid", __func__);
1453        pthread_mutex_unlock(&mMutex);
1454        return rc;
1455    }
1456
1457    meta = request->settings;
1458
1459    // For first capture request, send capture intent, and
1460    // stream on all streams
1461    if (mFirstRequest) {
1462
1463        for (size_t i = 0; i < request->num_output_buffers; i++) {
1464            const camera3_stream_buffer_t& output = request->output_buffers[i];
1465            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1466            rc = channel->registerBuffer(output.buffer);
1467            if (rc < 0) {
1468                ALOGE("%s: registerBuffer failed",
1469                        __func__);
1470                pthread_mutex_unlock(&mMutex);
1471                return -ENODEV;
1472            }
1473        }
1474
1475        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1476            int32_t hal_version = CAM_HAL_V3;
1477            uint8_t captureIntent =
1478                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1479
1480            memset(mParameters, 0, sizeof(metadata_buffer_t));
1481            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1482            AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1483                sizeof(hal_version), &hal_version);
1484            AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1485                sizeof(captureIntent), &captureIntent);
1486            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1487                mParameters);
1488        }
1489
1490        ALOGD("%s: Start META Channel", __func__);
1491        mMetadataChannel->start();
1492
1493        if (mSupportChannel)
1494            mSupportChannel->start();
1495
1496        //First initialize all streams
1497        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1498            it != mStreamInfo.end(); it++) {
1499            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1500            rc = channel->initialize();
1501            if (NO_ERROR != rc) {
1502                ALOGE("%s : Channel initialization failed %d", __func__, rc);
1503                if (mSupportChannel)
1504                    mSupportChannel->stop();
1505                mMetadataChannel->stop();
1506                pthread_mutex_unlock(&mMutex);
1507                return rc;
1508            }
1509        }
1510        //Then start them.
1511        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1512            it != mStreamInfo.end(); it++) {
1513            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1514            ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
1515            channel->start();
1516        }
1517    }
1518
1519    uint32_t frameNumber = request->frame_number;
1520    cam_stream_ID_t streamID;
1521
1522    if (meta.exists(ANDROID_REQUEST_ID)) {
1523        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1524        mCurrentRequestId = request_id;
1525        ALOGV("%s: Received request with id: %d",__func__, request_id);
1526    } else if (mFirstRequest || mCurrentRequestId == -1){
1527        ALOGE("%s: Unable to find request id field, \
1528                & no previous id available", __func__);
1529        return NAME_NOT_FOUND;
1530    } else {
1531        ALOGV("%s: Re-using old request id", __func__);
1532        request_id = mCurrentRequestId;
1533    }
1534
1535    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1536                                    __func__, __LINE__,
1537                                    request->num_output_buffers,
1538                                    request->input_buffer,
1539                                    frameNumber);
1540    // Acquire all request buffers first
1541    streamID.num_streams = 0;
1542    int blob_request = 0;
1543    for (size_t i = 0; i < request->num_output_buffers; i++) {
1544        const camera3_stream_buffer_t& output = request->output_buffers[i];
1545        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1546        sp<Fence> acquireFence = new Fence(output.acquire_fence);
1547
1548        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1549            //Call function to store local copy of jpeg data for encode params.
1550            blob_request = 1;
1551        }
1552
1553        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1554        if (rc != OK) {
1555            ALOGE("%s: fence wait failed %d", __func__, rc);
1556            pthread_mutex_unlock(&mMutex);
1557            return rc;
1558        }
1559
1560        streamID.streamID[streamID.num_streams] =
1561            channel->getStreamID(channel->getStreamTypeMask());
1562        streamID.num_streams++;
1563    }
1564
1565    if(request->input_buffer == NULL) {
1566       rc = setFrameParameters(request, streamID);
1567        if (rc < 0) {
1568            ALOGE("%s: fail to set frame parameters", __func__);
1569            pthread_mutex_unlock(&mMutex);
1570            return rc;
1571        }
1572    }
1573
1574    /* Update pending request list and pending buffers map */
1575    PendingRequestInfo pendingRequest;
1576    pendingRequest.frame_number = frameNumber;
1577    pendingRequest.num_buffers = request->num_output_buffers;
1578    pendingRequest.request_id = request_id;
1579    pendingRequest.blob_request = blob_request;
1580    pendingRequest.bNotified = 0;
1581    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1582    pendingRequest.pipeline_depth = 0;
1583    extractJpegMetadata(pendingRequest.jpegMetadata, request);
1584
1585    for (size_t i = 0; i < request->num_output_buffers; i++) {
1586        RequestedBufferInfo requestedBuf;
1587        requestedBuf.stream = request->output_buffers[i].stream;
1588        requestedBuf.buffer = NULL;
1589        pendingRequest.buffers.push_back(requestedBuf);
1590
1591        // Add to buffer handle the pending buffers list
1592        PendingBufferInfo bufferInfo;
1593        bufferInfo.frame_number = frameNumber;
1594        bufferInfo.buffer = request->output_buffers[i].buffer;
1595        bufferInfo.stream = request->output_buffers[i].stream;
1596        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1597        mPendingBuffersMap.num_buffers++;
1598        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1599          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1600          bufferInfo.stream->format);
1601    }
1602    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1603          __func__, mPendingBuffersMap.num_buffers);
1604
1605    mPendingRequestsList.push_back(pendingRequest);
1606
1607    // Notify metadata channel we receive a request
1608    mMetadataChannel->request(NULL, frameNumber);
1609
1610    // Call request on other streams
1611    for (size_t i = 0; i < request->num_output_buffers; i++) {
1612        const camera3_stream_buffer_t& output = request->output_buffers[i];
1613        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1614        mm_camera_buf_def_t *pInputBuffer = NULL;
1615
1616        if (channel == NULL) {
1617            ALOGE("%s: invalid channel pointer for stream", __func__);
1618            continue;
1619        }
1620
1621        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1622            QCamera3RegularChannel* inputChannel = NULL;
1623            if(request->input_buffer != NULL){
1624
1625                //Try to get the internal format
1626                inputChannel = (QCamera3RegularChannel*)
1627                    request->input_buffer->stream->priv;
1628                if(inputChannel == NULL ){
1629                    ALOGE("%s: failed to get input channel handle", __func__);
1630                } else {
1631                    pInputBuffer =
1632                        inputChannel->getInternalFormatBuffer(
1633                                request->input_buffer->buffer);
1634                    ALOGD("%s: Input buffer dump",__func__);
1635                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
1636                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
1637                    ALOGD("frame len:%d", pInputBuffer->frame_len);
1638                    ALOGD("Handle:%p", request->input_buffer->buffer);
1639                }
1640                rc = channel->request(output.buffer, frameNumber,
1641                            pInputBuffer, mParameters);
1642                if (rc < 0) {
1643                    ALOGE("%s: Fail to request on picture channel", __func__);
1644                    pthread_mutex_unlock(&mMutex);
1645                    return rc;
1646                }
1647
1648                rc = setReprocParameters(request);
1649                if (rc < 0) {
1650                    ALOGE("%s: fail to set reproc parameters", __func__);
1651                    pthread_mutex_unlock(&mMutex);
1652                    return rc;
1653                }
1654            } else
1655                rc = channel->request(output.buffer, frameNumber,
1656                            NULL, mParameters);
1657        } else {
1658            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1659                __LINE__, output.buffer, frameNumber);
1660           rc = channel->request(output.buffer, frameNumber);
1661        }
1662        if (rc < 0)
1663            ALOGE("%s: request failed", __func__);
1664    }
1665
1666    mFirstRequest = false;
1667    // Added a timed condition wait
1668    struct timespec ts;
1669    uint8_t isValidTimeout = 1;
1670    rc = clock_gettime(CLOCK_REALTIME, &ts);
1671    if (rc < 0) {
1672        isValidTimeout = 0;
1673        ALOGE("%s: Error reading the real time clock!!", __func__);
1674    }
1675    else {
1676        // Make timeout as 5 sec for request to be honored
1677        ts.tv_sec += 5;
1678    }
1679    //Block on conditional variable
1680
1681    mPendingRequest++;
1682    while (mPendingRequest >= kMaxInFlight) {
1683        if (!isValidTimeout) {
1684            ALOGV("%s: Blocking on conditional wait", __func__);
1685            pthread_cond_wait(&mRequestCond, &mMutex);
1686        }
1687        else {
1688            ALOGV("%s: Blocking on timed conditional wait", __func__);
1689            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1690            if (rc == ETIMEDOUT) {
1691                rc = -ENODEV;
1692                ALOGE("%s: Unblocked on timeout!!!!", __func__);
1693                break;
1694            }
1695        }
1696        ALOGV("%s: Unblocked", __func__);
1697    }
1698    pthread_mutex_unlock(&mMutex);
1699
1700    return rc;
1701}
1702
1703/*===========================================================================
1704 * FUNCTION   : dump
1705 *
1706 * DESCRIPTION:
1707 *
1708 * PARAMETERS :
1709 *
1710 *
1711 * RETURN     :
1712 *==========================================================================*/
1713void QCamera3HardwareInterface::dump(int /*fd*/)
1714{
1715    /*Enable lock when we implement this function*/
1716    /*
1717    pthread_mutex_lock(&mMutex);
1718
1719    pthread_mutex_unlock(&mMutex);
1720    */
1721    return;
1722}
1723
1724/*===========================================================================
1725 * FUNCTION   : flush
1726 *
1727 * DESCRIPTION:
1728 *
1729 * PARAMETERS :
1730 *
1731 *
1732 * RETURN     :
1733 *==========================================================================*/
1734int QCamera3HardwareInterface::flush()
1735{
1736
1737    unsigned int frameNum = 0;
1738    camera3_notify_msg_t notify_msg;
1739    camera3_capture_result_t result;
1740    camera3_stream_buffer_t pStream_Buf;
1741
1742    ALOGV("%s: Unblocking Process Capture Request", __func__);
1743
1744    // Stop the Streams/Channels
1745    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1746        it != mStreamInfo.end(); it++) {
1747        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1748        channel->stop();
1749        (*it)->status = INVALID;
1750    }
1751
1752    if (mSupportChannel) {
1753        mSupportChannel->stop();
1754    }
1755    if (mMetadataChannel) {
1756        /* If content of mStreamInfo is not 0, there is metadata stream */
1757        mMetadataChannel->stop();
1758    }
1759
1760    // Mutex Lock
1761    pthread_mutex_lock(&mMutex);
1762
1763    // Unblock process_capture_request
1764    mPendingRequest = 0;
1765    pthread_cond_signal(&mRequestCond);
1766
1767    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1768    frameNum = i->frame_number;
1769    ALOGV("%s: Latest frame num on  mPendingRequestsList = %d",
1770      __func__, frameNum);
1771
1772    // Go through the pending buffers and send buffer errors
1773    for (List<PendingBufferInfo>::iterator k =
1774         mPendingBuffersMap.mPendingBufferList.begin();
1775         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
1776         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1777          __func__, k->frame_number, k->buffer, k->stream,
1778          k->stream->format);
1779
1780        if (k->frame_number < frameNum) {
1781            // Send Error notify to frameworks for each buffer for which
1782            // metadata buffer is already sent
1783            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
1784              __func__, k->frame_number, k->buffer);
1785
1786            notify_msg.type = CAMERA3_MSG_ERROR;
1787            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1788            notify_msg.message.error.error_stream = k->stream;
1789            notify_msg.message.error.frame_number = k->frame_number;
1790            mCallbackOps->notify(mCallbackOps, &notify_msg);
1791            ALOGV("%s: notify frame_number = %d", __func__,
1792                    i->frame_number);
1793
1794            pStream_Buf.acquire_fence = -1;
1795            pStream_Buf.release_fence = -1;
1796            pStream_Buf.buffer = k->buffer;
1797            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1798            pStream_Buf.stream = k->stream;
1799
1800            result.result = NULL;
1801            result.frame_number = k->frame_number;
1802            result.num_output_buffers = 1;
1803            result.output_buffers = &pStream_Buf ;
1804            mCallbackOps->process_capture_result(mCallbackOps, &result);
1805
1806            mPendingBuffersMap.num_buffers--;
1807            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1808        }
1809        else {
1810          k++;
1811        }
1812    }
1813
1814    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1815
1816    // Go through the pending requests info and send error request to framework
1817    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
1818        int numBuffers = 0;
1819        ALOGV("%s:Sending ERROR REQUEST for frame %d",
1820              __func__, i->frame_number);
1821
1822        // Send shutter notify to frameworks
1823        notify_msg.type = CAMERA3_MSG_ERROR;
1824        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
1825        notify_msg.message.error.error_stream = NULL;
1826        notify_msg.message.error.frame_number = i->frame_number;
1827        mCallbackOps->notify(mCallbackOps, &notify_msg);
1828
1829        result.frame_number = i->frame_number;
1830        result.num_output_buffers = 0;
1831        result.output_buffers = NULL;
1832        numBuffers = 0;
1833
1834        for (List<PendingBufferInfo>::iterator k =
1835             mPendingBuffersMap.mPendingBufferList.begin();
1836             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
1837          if (k->frame_number == i->frame_number) {
1838            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
1839                   " stream = %p, stream format = %d",__func__,
1840                   k->frame_number, k->buffer, k->stream, k->stream->format);
1841
1842            pStream_Buf.acquire_fence = -1;
1843            pStream_Buf.release_fence = -1;
1844            pStream_Buf.buffer = k->buffer;
1845            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
1846            pStream_Buf.stream = k->stream;
1847
1848            result.num_output_buffers = 1;
1849            result.output_buffers = &pStream_Buf;
1850            result.result = NULL;
1851            result.frame_number = i->frame_number;
1852
1853            mCallbackOps->process_capture_result(mCallbackOps, &result);
1854            mPendingBuffersMap.num_buffers--;
1855            k = mPendingBuffersMap.mPendingBufferList.erase(k);
1856            numBuffers++;
1857          }
1858          else {
1859            k++;
1860          }
1861        }
1862        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1863              __func__, mPendingBuffersMap.num_buffers);
1864
1865        i = mPendingRequestsList.erase(i);
1866    }
1867
1868    /* Reset pending buffer list and requests list */
1869    mPendingRequestsList.clear();
1870    /* Reset pending frame Drop list and requests list */
1871    mPendingFrameDropList.clear();
1872
1873    mPendingBuffersMap.num_buffers = 0;
1874    mPendingBuffersMap.mPendingBufferList.clear();
1875    ALOGV("%s: Cleared all the pending buffers ", __func__);
1876
1877    mFirstRequest = true;
1878    pthread_mutex_unlock(&mMutex);
1879    return 0;
1880}
1881
1882/*===========================================================================
1883 * FUNCTION   : captureResultCb
1884 *
1885 * DESCRIPTION: Callback handler for all capture result
1886 *              (streams, as well as metadata)
1887 *
1888 * PARAMETERS :
1889 *   @metadata : metadata information
1890 *   @buffer   : actual gralloc buffer to be returned to frameworks.
1891 *               NULL if metadata.
1892 *
1893 * RETURN     : NONE
1894 *==========================================================================*/
1895void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
1896                camera3_stream_buffer_t *buffer, uint32_t frame_number)
1897{
1898    pthread_mutex_lock(&mMutex);
1899
1900    /* Assume flush() is called before any reprocessing. Send
1901     * notify and result immediately upon receipt of any callback*/
1902    if (mLoopBackResult) {
1903        /* Send notify */
1904        camera3_notify_msg_t notify_msg;
1905        notify_msg.type = CAMERA3_MSG_SHUTTER;
1906        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
1907        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
1908        mCallbackOps->notify(mCallbackOps, &notify_msg);
1909
1910        /* Send capture result */
1911        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
1912        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
1913        free(mLoopBackResult);
1914        mLoopBackResult = NULL;
1915    }
1916
1917    if (metadata_buf)
1918        handleMetadataWithLock(metadata_buf);
1919    else
1920        handleBufferWithLock(buffer, frame_number);
1921
1922    pthread_mutex_unlock(&mMutex);
1923    return;
1924}
1925
1926/*===========================================================================
1927 * FUNCTION   : translateFromHalMetadata
1928 *
1929 * DESCRIPTION:
1930 *
1931 * PARAMETERS :
1932 *   @metadata : metadata information from callback
1933 *
1934 * RETURN     : camera_metadata_t*
1935 *              metadata in a format specified by fwk
1936 *==========================================================================*/
1937camera_metadata_t*
1938QCamera3HardwareInterface::translateFromHalMetadata(
1939                                 metadata_buffer_t *metadata,
1940                                 nsecs_t timestamp,
1941                                 int32_t request_id,
1942                                 const CameraMetadata& jpegMetadata,
1943                                 uint8_t pipeline_depth)
1944{
1945    CameraMetadata camMetadata;
1946    camera_metadata_t* resultMetadata;
1947
1948    if (jpegMetadata.entryCount())
1949        camMetadata.append(jpegMetadata);
1950
1951    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
1952    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
1953    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
1954
1955    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
1956    uint8_t next_entry;
1957    while (curr_entry != CAM_INTF_PARM_MAX) {
1958       switch (curr_entry) {
1959         case CAM_INTF_META_FRAME_NUMBER:{
1960             int64_t frame_number = *(uint32_t *) POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
1961             camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
1962             break;
1963         }
1964         case CAM_INTF_META_FACE_DETECTION:{
1965             cam_face_detection_data_t *faceDetectionInfo =
1966                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
1967             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
1968             int32_t faceIds[MAX_ROI];
1969             uint8_t faceScores[MAX_ROI];
1970             int32_t faceRectangles[MAX_ROI * 4];
1971             int32_t faceLandmarks[MAX_ROI * 6];
1972             int j = 0, k = 0;
1973             for (int i = 0; i < numFaces; i++) {
1974                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
1975                 faceScores[i] = faceDetectionInfo->faces[i].score;
1976                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
1977                         faceRectangles+j, -1);
1978                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
1979                 j+= 4;
1980                 k+= 6;
1981             }
1982
1983             if (numFaces <= 0) {
1984                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
1985                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
1986                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
1987                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
1988             }
1989
1990             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
1991             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
1992             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
1993               faceRectangles, numFaces*4);
1994             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
1995               faceLandmarks, numFaces*6);
1996
1997            break;
1998            }
1999         case CAM_INTF_META_COLOR_CORRECT_MODE:{
2000             uint8_t  *color_correct_mode =
2001                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2002             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2003             break;
2004          }
2005
2006         // 3A state is sent in urgent partial result (uses quirk)
2007         case CAM_INTF_META_AEC_PRECAPTURE_ID:
2008         case CAM_INTF_META_AEC_ROI:
2009         case CAM_INTF_META_AEC_STATE:
2010         case CAM_INTF_PARM_AEC_LOCK:
2011         case CAM_INTF_PARM_EV:
2012         case CAM_INTF_PARM_FOCUS_MODE:
2013         case CAM_INTF_META_AF_ROI:
2014         case CAM_INTF_META_AF_STATE:
2015         case CAM_INTF_META_AF_TRIGGER_ID:
2016         case CAM_INTF_PARM_WHITE_BALANCE:
2017         case CAM_INTF_META_AWB_REGIONS:
2018         case CAM_INTF_META_AWB_STATE:
2019         case CAM_INTF_PARM_AWB_LOCK:
2020         case CAM_INTF_META_PRECAPTURE_TRIGGER:
2021         case CAM_INTF_META_AF_TRIGGER_NOTICE:
2022         case CAM_INTF_META_MODE: {
2023           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
2024           break;
2025         }
2026
2027          case CAM_INTF_META_EDGE_MODE: {
2028             cam_edge_application_t  *edgeApplication =
2029                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
2030             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2031             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2032             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2033             break;
2034          }
2035          case CAM_INTF_META_FLASH_POWER: {
2036             uint8_t  *flashPower =
2037                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2038             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2039             break;
2040          }
2041          case CAM_INTF_META_FLASH_FIRING_TIME: {
2042             int64_t  *flashFiringTime =
2043                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2044             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2045             break;
2046          }
2047          case CAM_INTF_META_FLASH_STATE: {
2048             uint8_t  flashState =
2049                *((uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata));
2050             if (!gCamCapability[mCameraId]->flash_available) {
2051                 flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2052             }
2053             camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
2054             break;
2055          }
2056          case CAM_INTF_META_FLASH_MODE:{
2057             uint8_t flashMode = *((uint8_t*)
2058                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata));
2059             uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
2060                                          sizeof(FLASH_MODES_MAP),
2061                                          flashMode);
2062             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
2063             break;
2064          }
2065          case CAM_INTF_META_HOTPIXEL_MODE: {
2066              uint8_t  *hotPixelMode =
2067                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2068              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2069              break;
2070          }
2071          case CAM_INTF_META_LENS_APERTURE:{
2072             float  *lensAperture =
2073                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2074             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2075             break;
2076          }
2077          case CAM_INTF_META_LENS_FILTERDENSITY: {
2078             float  *filterDensity =
2079                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2080             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2081             break;
2082          }
2083          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2084             float  *focalLength =
2085                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2086             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2087             break;
2088          }
2089          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2090             float  *focusDistance =
2091                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2092             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2093             break;
2094          }
2095          case CAM_INTF_META_LENS_FOCUS_RANGE: {
2096             float  *focusRange =
2097                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2098             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2099             break;
2100          }
2101          case CAM_INTF_META_LENS_STATE: {
2102             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2103             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2104             break;
2105          }
2106          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2107             uint8_t  *opticalStab =
2108                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2109             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2110             break;
2111          }
2112          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2113             uint8_t  *noiseRedMode =
2114                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2115             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2116             break;
2117          }
2118          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2119             uint8_t  *noiseRedStrength =
2120                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2121             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2122             break;
2123          }
2124          case CAM_INTF_META_SCALER_CROP_REGION: {
2125             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2126             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2127             int32_t scalerCropRegion[4];
2128             scalerCropRegion[0] = hScalerCropRegion->left;
2129             scalerCropRegion[1] = hScalerCropRegion->top;
2130             scalerCropRegion[2] = hScalerCropRegion->width;
2131             scalerCropRegion[3] = hScalerCropRegion->height;
2132             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2133             break;
2134          }
2135          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2136             int64_t  *sensorExpTime =
2137                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2138             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2139             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2140             break;
2141          }
2142          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2143             int64_t  *sensorFameDuration =
2144                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2145             ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2146             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2147             break;
2148          }
2149          case CAM_INTF_META_SENSOR_SENSITIVITY:{
2150             int32_t  *sensorSensitivity =
2151                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2152             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
2153             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
2154             break;
2155          }
2156
2157          case CAM_INTF_META_SHADING_MODE: {
2158             uint8_t  *shadingMode =
2159                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2160             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2161             break;
2162          }
2163
2164          case CAM_INTF_META_LENS_SHADING_MAP_MODE: {
2165             uint8_t  *shadingMapMode =
2166                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata);
2167             camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, shadingMapMode, 1);
2168             break;
2169          }
2170
2171          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2172             uint8_t  *faceDetectMode =
2173                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2174             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2175                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2176                                                        *faceDetectMode);
2177             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2178             break;
2179          }
2180          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2181             uint8_t  *histogramMode =
2182                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2183             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2184             break;
2185          }
2186          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2187               uint8_t  *sharpnessMapMode =
2188                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2189               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2190                                  sharpnessMapMode, 1);
2191               break;
2192           }
2193          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2194               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2195               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2196               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2197                                  (int32_t*)sharpnessMap->sharpness,
2198                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2199               break;
2200          }
2201          case CAM_INTF_META_LENS_SHADING_MAP: {
2202               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2203               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2204               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2205               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2206               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2207                                  (float*)lensShadingMap->lens_shading,
2208                                  4*map_width*map_height);
2209               break;
2210          }
2211
2212          case CAM_INTF_META_TONEMAP_MODE: {
2213             uint8_t  *toneMapMode =
2214                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2215             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2216             break;
2217          }
2218
2219          case CAM_INTF_META_TONEMAP_CURVES:{
2220             //Populate CAM_INTF_META_TONEMAP_CURVES
2221             /* ch0 = G, ch 1 = B, ch 2 = R*/
2222             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2223             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2224             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2225                                (float*)tonemap->curves[0].tonemap_points,
2226                                tonemap->tonemap_points_cnt * 2);
2227
2228             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2229                                (float*)tonemap->curves[1].tonemap_points,
2230                                tonemap->tonemap_points_cnt * 2);
2231
2232             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2233                                (float*)tonemap->curves[2].tonemap_points,
2234                                tonemap->tonemap_points_cnt * 2);
2235             break;
2236          }
2237
2238          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2239             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2240             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2241             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2242             break;
2243          }
2244          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2245              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2246              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2247              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2248                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2249              break;
2250          }
2251
2252          /* DNG file realted metadata */
2253          case CAM_INTF_META_PROFILE_TONE_CURVE: {
2254             cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
2255             POINTER_OF(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
2256             camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
2257                                (float*)toneCurve->curve.tonemap_points,
2258                                toneCurve->tonemap_points_cnt * 2);
2259             break;
2260          }
2261
2262          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2263             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2264             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2265             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2266                       predColorCorrectionGains->gains, 4);
2267             break;
2268          }
2269          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2270             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2271                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2272             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2273                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2274             break;
2275
2276          }
2277
2278          case CAM_INTF_META_OTP_WB_GRGB:{
2279             float *otpWbGrGb = (float*) POINTER_OF(CAM_INTF_META_OTP_WB_GRGB, metadata);
2280             camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
2281             break;
2282          }
2283
2284          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2285             uint8_t *blackLevelLock = (uint8_t*)
2286               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2287             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2288             break;
2289          }
2290          case CAM_INTF_META_SCENE_FLICKER:{
2291             uint8_t *sceneFlicker = (uint8_t*)
2292             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2293             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2294             break;
2295          }
2296          case CAM_INTF_PARM_LED_MODE:
2297             break;
2298          case CAM_INTF_PARM_EFFECT: {
2299             uint8_t *effectMode = (uint8_t*)
2300                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2301             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2302                                                    sizeof(EFFECT_MODES_MAP),
2303                                                    *effectMode);
2304             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2305             break;
2306          }
2307          case CAM_INTF_META_TEST_PATTERN_DATA: {
2308             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2309                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2310             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2311                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2312                     testPatternData->mode);
2313             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2314                     &fwk_testPatternMode, 1);
2315             break;
2316          }
2317          case CAM_INTF_META_JPEG_GPS_COORDINATES: {
2318              double *gps_coords = (double *)POINTER_OF(
2319                      CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2320              camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
2321              break;
2322          }
2323          case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
2324              char *gps_methods = (char *)POINTER_OF(
2325                      CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2326              String8 str(gps_methods);
2327              camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2328              break;
2329          }
2330          case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
2331              int64_t *gps_timestamp = (int64_t *)POINTER_OF(
2332                      CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2333              camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
2334              break;
2335          }
2336          case CAM_INTF_META_JPEG_ORIENTATION: {
2337              int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
2338                      CAM_INTF_META_JPEG_ORIENTATION, metadata);
2339              camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
2340              break;
2341          }
2342          case CAM_INTF_META_JPEG_QUALITY: {
2343              uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
2344                      CAM_INTF_META_JPEG_QUALITY, metadata);
2345              camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
2346              break;
2347          }
2348          case CAM_INTF_META_JPEG_THUMB_QUALITY: {
2349              uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
2350                      CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2351              camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
2352              break;
2353          }
2354
2355          case CAM_INTF_META_JPEG_THUMB_SIZE: {
2356              cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
2357                      CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2358              camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
2359              break;
2360          }
2361
2362             break;
2363          case CAM_INTF_META_PRIVATE_DATA: {
2364             uint8_t *privateData = (uint8_t *)
2365                 POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
2366             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
2367                 privateData, MAX_METADATA_PAYLOAD_SIZE);
2368             break;
2369          }
2370
2371          case CAM_INTF_META_NEUTRAL_COL_POINT:{
2372             cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
2373                 POINTER_OF(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
2374             camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
2375                     (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
2376             break;
2377          }
2378
2379          default:
2380             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2381                   __func__, curr_entry);
2382             break;
2383       }
2384       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2385       curr_entry = next_entry;
2386    }
2387
2388    uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
2389    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
2390
2391    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
2392    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
2393
2394    int32_t hotPixelMap[2];
2395    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2396
2397    resultMetadata = camMetadata.release();
2398    return resultMetadata;
2399}
2400
2401/*===========================================================================
2402 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
2403 *
 * DESCRIPTION: translates urgent (partial, 3A-related) metadata from the HAL
 *              into a framework-format metadata buffer
2405 *
2406 * PARAMETERS :
2407 *   @metadata : metadata information from callback
2408 *
2409 * RETURN     : camera_metadata_t*
2410 *              metadata in a format specified by fwk
2411 *==========================================================================*/
2412camera_metadata_t*
2413QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
2414                                (metadata_buffer_t *metadata)
2415{
2416    CameraMetadata camMetadata;
2417    camera_metadata_t* resultMetadata;
2418
2419    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
2420    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);
2421
2422    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
2423    uint8_t next_entry;
2424    while (curr_entry != CAM_INTF_PARM_MAX) {
2425      switch (curr_entry) {
2426        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
2427            int32_t  *ae_precapture_id =
2428              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
2429            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
2430                                          ae_precapture_id, 1);
2431            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
2432          break;
2433        }
2434        case CAM_INTF_META_AEC_ROI: {
2435            cam_area_t  *hAeRegions =
2436                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
2437            int32_t aeRegions[5];
2438            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
2439            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
2440            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
2441            break;
2442        }
2443        case CAM_INTF_META_AEC_STATE:{
2444            uint8_t *ae_state =
2445                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
2446            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
2447            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
2448            break;
2449        }
2450        case CAM_INTF_PARM_AEC_LOCK: {
2451            uint8_t  *ae_lock =
2452              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AEC_LOCK, metadata);
2453            camMetadata.update(ANDROID_CONTROL_AE_LOCK,
2454                                          ae_lock, 1);
2455            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_LOCK", __func__);
2456        }
2457        case CAM_INTF_PARM_EV: {
2458            int32_t  *expCompensation =
2459              (int32_t *)POINTER_OF(CAM_INTF_PARM_EV, metadata);
2460            camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2461                                          expCompensation, 1);
2462            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION",
2463                __func__);
2464        }
2465        case CAM_INTF_PARM_FOCUS_MODE:{
2466            uint8_t  *focusMode =
2467                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
2468            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2469               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
2470            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
2471            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
2472            break;
2473        }
2474        case CAM_INTF_META_AF_ROI:{
2475            /*af regions*/
2476            cam_area_t  *hAfRegions =
2477                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
2478            int32_t afRegions[5];
2479            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
2480            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
2481            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
2482            break;
2483        }
2484        case CAM_INTF_META_AF_STATE: {
2485            uint8_t  *afState =
2486               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
2487            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
2488            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
2489            break;
2490        }
2491        case CAM_INTF_META_AF_TRIGGER_ID: {
2492            int32_t  *afTriggerId =
2493                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
2494            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
2495            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
2496            break;
2497        }
2498        case CAM_INTF_PARM_WHITE_BALANCE: {
2499           uint8_t  *whiteBalance =
2500                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
2501             uint8_t fwkWhiteBalanceMode =
2502                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2503                    sizeof(WHITE_BALANCE_MODES_MAP)/
2504                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
2505             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
2506                 &fwkWhiteBalanceMode, 1);
2507            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
2508             break;
2509        }
2510        case CAM_INTF_META_AWB_REGIONS: {
2511           /*awb regions*/
2512           cam_area_t  *hAwbRegions =
2513               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
2514           int32_t awbRegions[5];
2515           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
2516           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
2517           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
2518           break;
2519        }
2520        case CAM_INTF_META_AWB_STATE: {
2521           uint8_t  *whiteBalanceState =
2522              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
2523           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
2524           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
2525           break;
2526        }
2527        case CAM_INTF_PARM_AWB_LOCK: {
2528            uint8_t  *awb_lock =
2529              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AWB_LOCK, metadata);
2530            camMetadata.update(ANDROID_CONTROL_AWB_LOCK, awb_lock, 1);
2531            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_LOCK", __func__);
2532            break;
2533        }
2534        case CAM_INTF_META_MODE: {
2535            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
2536            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
2537            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
2538            break;
2539        }
2540        case CAM_INTF_PARM_BESTSHOT_MODE: {
2541            uint8_t *sceneMode =
2542                (uint8_t *)POINTER_OF(CAM_INTF_PARM_BESTSHOT_MODE, metadata);
2543            uint8_t fwkSceneMode =
2544                (uint8_t)lookupFwkName(SCENE_MODES_MAP,
2545                sizeof(SCENE_MODES_MAP)/
2546                sizeof(SCENE_MODES_MAP[0]), *sceneMode);
2547            camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
2548                 &fwkSceneMode, 1);
2549            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
2550            break;
2551        }
2552        case CAM_INTF_META_PRECAPTURE_TRIGGER: {
2553            uint8_t *precaptureTrigger =
2554                (uint8_t *)POINTER_OF(CAM_INTF_META_PRECAPTURE_TRIGGER, metadata);
2555            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
2556                 precaptureTrigger, 1);
2557            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER",
2558                __func__);
2559            break;
2560        }
2561        case CAM_INTF_META_AF_TRIGGER_NOTICE: {
2562            uint8_t *af_trigger =
2563              (uint8_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_NOTICE, metadata);
2564            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
2565                af_trigger, 1);
2566            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER = %d",
2567                __func__, *af_trigger);
2568        }
2569        default:
2570            ALOGV("%s: Normal Metadata %d, do not process",
2571              __func__, curr_entry);
2572       }
2573       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2574       curr_entry = next_entry;
2575    }
2576    resultMetadata = camMetadata.release();
2577    return resultMetadata;
2578}
2579
2580/*===========================================================================
2581 * FUNCTION   : dumpMetadataToFile
2582 *
2583 * DESCRIPTION: Dumps tuning metadata to file system
2584 *
2585 * PARAMETERS :
2586 *   @meta           : tuning metadata
2587 *   @dumpFrameCount : current dump frame count
2588 *   @enabled        : Enable mask
2589 *
2590 *==========================================================================*/
2591void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
2592                                                   uint32_t &dumpFrameCount,
2593                                                   int32_t enabled,
2594                                                   const char *type,
2595                                                   uint32_t frameNumber)
2596{
2597    uint32_t frm_num = 0;
2598
2599    //Some sanity checks
2600    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
2601        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
2602              __func__,
2603              meta.tuning_sensor_data_size,
2604              TUNING_SENSOR_DATA_MAX);
2605        return;
2606    }
2607
2608    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
2609        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
2610              __func__,
2611              meta.tuning_vfe_data_size,
2612              TUNING_VFE_DATA_MAX);
2613        return;
2614    }
2615
2616    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
2617        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
2618              __func__,
2619              meta.tuning_cpp_data_size,
2620              TUNING_CPP_DATA_MAX);
2621        return;
2622    }
2623
2624    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
2625        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
2626              __func__,
2627              meta.tuning_cac_data_size,
2628              TUNING_CAC_DATA_MAX);
2629        return;
2630    }
2631    //
2632
2633    if(enabled){
2634        frm_num = ((enabled & 0xffff0000) >> 16);
2635        if(frm_num == 0) {
2636            frm_num = 10; //default 10 frames
2637        }
2638        if(frm_num > 256) {
2639            frm_num = 256; //256 buffers cycle around
2640        }
2641        if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
2642            // reset frame count if cycling
2643            dumpFrameCount = 0;
2644        }
2645        ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
2646        if (dumpFrameCount < frm_num) {
2647            char timeBuf[FILENAME_MAX];
2648            char buf[FILENAME_MAX];
2649            memset(buf, 0, sizeof(buf));
2650            memset(timeBuf, 0, sizeof(timeBuf));
2651            time_t current_time;
2652            struct tm * timeinfo;
2653            time (&current_time);
2654            timeinfo = localtime (&current_time);
2655            strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
2656            String8 filePath(timeBuf);
2657            snprintf(buf,
2658                     sizeof(buf),
2659                     "%d_HAL_META_%s_%d.bin",
2660                     dumpFrameCount,
2661                     type,
2662                     frameNumber);
2663            filePath.append(buf);
2664            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2665            if (file_fd > 0) {
2666                int written_len = 0;
2667                meta.tuning_data_version = TUNING_DATA_VERSION;
2668                void *data = (void *)((uint8_t *)&meta.tuning_data_version);
2669                written_len += write(file_fd, data, sizeof(uint32_t));
2670                data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
2671                ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
2672                written_len += write(file_fd, data, sizeof(uint32_t));
2673                data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
2674                ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
2675                written_len += write(file_fd, data, sizeof(uint32_t));
2676                data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
2677                ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
2678                written_len += write(file_fd, data, sizeof(uint32_t));
2679                data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
2680                ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
2681                written_len += write(file_fd, data, sizeof(uint32_t));
2682                int total_size = meta.tuning_sensor_data_size;
2683                data = (void *)((uint8_t *)&meta.data);
2684                written_len += write(file_fd, data, total_size);
2685                total_size = meta.tuning_vfe_data_size;
2686                data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
2687                written_len += write(file_fd, data, total_size);
2688                total_size = meta.tuning_cpp_data_size;
2689                data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
2690                written_len += write(file_fd, data, total_size);
2691                total_size = meta.tuning_cac_data_size;
2692                data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
2693                written_len += write(file_fd, data, total_size);
2694                close(file_fd);
2695            }else {
2696                ALOGE("%s: fail t open file for image dumping", __func__);
2697            }
2698            dumpFrameCount++;
2699        }
2700    }
2701}
2702
2703/*===========================================================================
2704 * FUNCTION   : cleanAndSortStreamInfo
2705 *
2706 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
2707 *              and sort them such that raw stream is at the end of the list
2708 *              This is a workaround for camera daemon constraint.
2709 *
2710 * PARAMETERS : None
2711 *
2712 *==========================================================================*/
2713void QCamera3HardwareInterface::cleanAndSortStreamInfo()
2714{
2715    List<stream_info_t *> newStreamInfo;
2716
2717    /*clean up invalid streams*/
2718    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2719            it != mStreamInfo.end();) {
2720        if(((*it)->status) == INVALID){
2721            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
2722            delete channel;
2723            free(*it);
2724            it = mStreamInfo.erase(it);
2725        } else {
2726            it++;
2727        }
2728    }
2729
2730    // Move preview/video/callback/snapshot streams into newList
2731    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2732            it != mStreamInfo.end();) {
2733        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
2734                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
2735            newStreamInfo.push_back(*it);
2736            it = mStreamInfo.erase(it);
2737        } else
2738            it++;
2739    }
2740    // Move raw streams into newList
2741    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2742            it != mStreamInfo.end();) {
2743        newStreamInfo.push_back(*it);
2744        it = mStreamInfo.erase(it);
2745    }
2746
2747    mStreamInfo = newStreamInfo;
2748}
2749
2750/*===========================================================================
2751 * FUNCTION   : extractJpegMetadata
2752 *
2753 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
2754 *              JPEG metadata is cached in HAL, and return as part of capture
2755 *              result when metadata is returned from camera daemon.
2756 *
2757 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
2758 *              @request:      capture request
2759 *
2760 *==========================================================================*/
2761void QCamera3HardwareInterface::extractJpegMetadata(
2762        CameraMetadata& jpegMetadata,
2763        const camera3_capture_request_t *request)
2764{
2765    CameraMetadata frame_settings;
2766    frame_settings = request->settings;
2767
2768    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
2769        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
2770                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
2771                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
2772
2773    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
2774        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
2775                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
2776                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
2777
2778    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
2779        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
2780                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
2781                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
2782
2783    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
2784        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
2785                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
2786                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
2787
2788    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
2789        jpegMetadata.update(ANDROID_JPEG_QUALITY,
2790                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
2791                frame_settings.find(ANDROID_JPEG_QUALITY).count);
2792
2793    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
2794        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
2795                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
2796                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
2797
2798    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
2799        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
2800                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
2801                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
2802}
2803
2804/*===========================================================================
2805 * FUNCTION   : convertToRegions
2806 *
2807 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
2808 *
2809 * PARAMETERS :
2810 *   @rect   : cam_rect_t struct to convert
2811 *   @region : int32_t destination array
2812 *   @weight : if we are converting from cam_area_t, weight is valid
2813 *             else weight = -1
2814 *
2815 *==========================================================================*/
2816void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
2817    region[0] = rect.left;
2818    region[1] = rect.top;
2819    region[2] = rect.left + rect.width;
2820    region[3] = rect.top + rect.height;
2821    if (weight > -1) {
2822        region[4] = weight;
2823    }
2824}
2825
2826/*===========================================================================
2827 * FUNCTION   : convertFromRegions
2828 *
 * DESCRIPTION: helper method to convert a 5-element region entry from the
 *              capture settings into a cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill
 *   @settings : capture request settings containing the region entry
 *   @tag      : metadata tag of the region entry, laid out as
 *               [x_min, y_min, x_max, y_max, weight]
 *
2836 *
2837 *==========================================================================*/
2838void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
2839                                                   const camera_metadata_t *settings,
2840                                                   uint32_t tag){
2841    CameraMetadata frame_settings;
2842    frame_settings = settings;
2843    int32_t x_min = frame_settings.find(tag).data.i32[0];
2844    int32_t y_min = frame_settings.find(tag).data.i32[1];
2845    int32_t x_max = frame_settings.find(tag).data.i32[2];
2846    int32_t y_max = frame_settings.find(tag).data.i32[3];
2847    roi->weight = frame_settings.find(tag).data.i32[4];
2848    roi->rect.left = x_min;
2849    roi->rect.top = y_min;
2850    roi->rect.width = x_max - x_min;
2851    roi->rect.height = y_max - y_min;
2852}
2853
2854/*===========================================================================
2855 * FUNCTION   : resetIfNeededROI
2856 *
2857 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
2858 *              crop region
2859 *
2860 * PARAMETERS :
2861 *   @roi       : cam_area_t struct to resize
2862 *   @scalerCropRegion : cam_crop_region_t region to compare against
2863 *
2864 *
2865 *==========================================================================*/
2866bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
2867                                                 const cam_crop_region_t* scalerCropRegion)
2868{
2869    int32_t roi_x_max = roi->rect.width + roi->rect.left;
2870    int32_t roi_y_max = roi->rect.height + roi->rect.top;
2871    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
2872    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
2873    if ((roi_x_max < scalerCropRegion->left) ||
2874        (roi_y_max < scalerCropRegion->top)  ||
2875        (roi->rect.left > crop_x_max) ||
2876        (roi->rect.top > crop_y_max)){
2877        return false;
2878    }
2879    if (roi->rect.left < scalerCropRegion->left) {
2880        roi->rect.left = scalerCropRegion->left;
2881    }
2882    if (roi->rect.top < scalerCropRegion->top) {
2883        roi->rect.top = scalerCropRegion->top;
2884    }
2885    if (roi_x_max > crop_x_max) {
2886        roi_x_max = crop_x_max;
2887    }
2888    if (roi_y_max > crop_y_max) {
2889        roi_y_max = crop_y_max;
2890    }
2891    roi->rect.width = roi_x_max - roi->rect.left;
2892    roi->rect.height = roi_y_max - roi->rect.top;
2893    return true;
2894}
2895
2896/*===========================================================================
2897 * FUNCTION   : convertLandmarks
2898 *
2899 * DESCRIPTION: helper method to extract the landmarks from face detection info
2900 *
2901 * PARAMETERS :
2902 *   @face   : cam_rect_t struct to convert
2903 *   @landmarks : int32_t destination array
2904 *
2905 *
2906 *==========================================================================*/
2907void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
2908{
2909    landmarks[0] = face.left_eye_center.x;
2910    landmarks[1] = face.left_eye_center.y;
2911    landmarks[2] = face.right_eye_center.x;
2912    landmarks[3] = face.right_eye_center.y;
2913    landmarks[4] = face.mouth_center.x;
2914    landmarks[5] = face.mouth_center.y;
2915}
2916
2917#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
2918/*===========================================================================
2919 * FUNCTION   : initCapabilities
2920 *
2921 * DESCRIPTION: initialize camera capabilities in static data struct
2922 *
2923 * PARAMETERS :
2924 *   @cameraId  : camera Id
2925 *
2926 * RETURN     : int32_t type of status
2927 *              NO_ERROR  -- success
2928 *              none-zero failure code
2929 *==========================================================================*/
2930int QCamera3HardwareInterface::initCapabilities(int cameraId)
2931{
2932    int rc = 0;
2933    mm_camera_vtbl_t *cameraHandle = NULL;
2934    QCamera3HeapMemory *capabilityHeap = NULL;
2935
2936    cameraHandle = camera_open(cameraId);
2937    if (!cameraHandle) {
2938        ALOGE("%s: camera_open failed", __func__);
2939        rc = -1;
2940        goto open_failed;
2941    }
2942
2943    capabilityHeap = new QCamera3HeapMemory();
2944    if (capabilityHeap == NULL) {
2945        ALOGE("%s: creation of capabilityHeap failed", __func__);
2946        goto heap_creation_failed;
2947    }
2948    /* Allocate memory for capability buffer */
2949    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
2950    if(rc != OK) {
2951        ALOGE("%s: No memory for cappability", __func__);
2952        goto allocate_failed;
2953    }
2954
2955    /* Map memory for capability buffer */
2956    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
2957    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
2958                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
2959                                capabilityHeap->getFd(0),
2960                                sizeof(cam_capability_t));
2961    if(rc < 0) {
2962        ALOGE("%s: failed to map capability buffer", __func__);
2963        goto map_failed;
2964    }
2965
2966    /* Query Capability */
2967    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
2968    if(rc < 0) {
2969        ALOGE("%s: failed to query capability",__func__);
2970        goto query_failed;
2971    }
2972    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
2973    if (!gCamCapability[cameraId]) {
2974        ALOGE("%s: out of memory", __func__);
2975        goto query_failed;
2976    }
2977    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
2978                                        sizeof(cam_capability_t));
2979    rc = 0;
2980
2981query_failed:
2982    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
2983                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
2984map_failed:
2985    capabilityHeap->deallocate();
2986allocate_failed:
2987    delete capabilityHeap;
2988heap_creation_failed:
2989    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
2990    cameraHandle = NULL;
2991open_failed:
2992    return rc;
2993}
2994
2995/*===========================================================================
2996 * FUNCTION   : initParameters
2997 *
2998 * DESCRIPTION: initialize camera parameters
2999 *
3000 * PARAMETERS :
3001 *
3002 * RETURN     : int32_t type of status
3003 *              NO_ERROR  -- success
3004 *              none-zero failure code
3005 *==========================================================================*/
3006int QCamera3HardwareInterface::initParameters()
3007{
3008    int rc = 0;
3009
3010    //Allocate Set Param Buffer
3011    mParamHeap = new QCamera3HeapMemory();
3012    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
3013    if(rc != OK) {
3014        rc = NO_MEMORY;
3015        ALOGE("Failed to allocate SETPARM Heap memory");
3016        delete mParamHeap;
3017        mParamHeap = NULL;
3018        return rc;
3019    }
3020
3021    //Map memory for parameters buffer
3022    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
3023            CAM_MAPPING_BUF_TYPE_PARM_BUF,
3024            mParamHeap->getFd(0),
3025            sizeof(metadata_buffer_t));
3026    if(rc < 0) {
3027        ALOGE("%s:failed to map SETPARM buffer",__func__);
3028        rc = FAILED_TRANSACTION;
3029        mParamHeap->deallocate();
3030        delete mParamHeap;
3031        mParamHeap = NULL;
3032        return rc;
3033    }
3034
3035    mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
3036    return rc;
3037}
3038
/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the backend before releasing it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Free the heap that backed the buffer; mParameters pointed into it,
    // so it must be cleared as well.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    mParameters = NULL;
}
3059
3060/*===========================================================================
3061 * FUNCTION   : calcMaxJpegSize
3062 *
3063 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
3064 *
3065 * PARAMETERS :
3066 *
3067 * RETURN     : max_jpeg_size
3068 *==========================================================================*/
3069int QCamera3HardwareInterface::calcMaxJpegSize()
3070{
3071    int32_t max_jpeg_size = 0;
3072    int temp_width, temp_height;
3073    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
3074        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
3075        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
3076        if (temp_width * temp_height > max_jpeg_size ) {
3077            max_jpeg_size = temp_width * temp_height;
3078        }
3079    }
3080    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3081    return max_jpeg_size;
3082}
3083
3084/*===========================================================================
3085 * FUNCTION   : initStaticMetadata
3086 *
3087 * DESCRIPTION: initialize the static metadata
3088 *
3089 * PARAMETERS :
3090 *   @cameraId  : camera Id
3091 *
3092 * RETURN     : int32_t type of status
3093 *              0  -- success
3094 *              non-zero failure code
3095 *==========================================================================*/
3096int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
3097{
3098    int rc = 0;
3099    CameraMetadata staticInfo;
3100
3101    /* android.info: hardware level */
3102    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
3103    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
3104        &supportedHardwareLevel, 1);
3105
3106    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
3107    /*HAL 3 only*/
3108    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3109                    &gCamCapability[cameraId]->min_focus_distance, 1);
3110
3111    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
3112                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
3113
3114    /*should be using focal lengths but sensor doesn't provide that info now*/
3115    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3116                      &gCamCapability[cameraId]->focal_length,
3117                      1);
3118
3119    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3120                      gCamCapability[cameraId]->apertures,
3121                      gCamCapability[cameraId]->apertures_count);
3122
3123    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3124                gCamCapability[cameraId]->filter_densities,
3125                gCamCapability[cameraId]->filter_densities_count);
3126
3127
3128    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3129                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
3130                      gCamCapability[cameraId]->optical_stab_modes_count);
3131
3132    staticInfo.update(ANDROID_LENS_POSITION,
3133                      gCamCapability[cameraId]->lens_position,
3134                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
3135
3136    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
3137                                       gCamCapability[cameraId]->lens_shading_map_size.height};
3138    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
3139                      lens_shading_map_size,
3140                      sizeof(lens_shading_map_size)/sizeof(int32_t));
3141
3142    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
3143            gCamCapability[cameraId]->sensor_physical_size, 2);
3144
3145    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
3146            gCamCapability[cameraId]->exposure_time_range, 2);
3147
3148    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3149            &gCamCapability[cameraId]->max_frame_duration, 1);
3150
3151    camera_metadata_rational baseGainFactor = {
3152            gCamCapability[cameraId]->base_gain_factor.numerator,
3153            gCamCapability[cameraId]->base_gain_factor.denominator};
3154    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
3155                      &baseGainFactor, 1);
3156
3157    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3158                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
3159
3160    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
3161                                  gCamCapability[cameraId]->pixel_array_size.height};
3162    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3163                      pixel_array_size, 2);
3164
3165    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
3166                                                gCamCapability[cameraId]->active_array_size.top,
3167                                                gCamCapability[cameraId]->active_array_size.width,
3168                                                gCamCapability[cameraId]->active_array_size.height};
3169    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3170                      active_array_size, 4);
3171
3172    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
3173            &gCamCapability[cameraId]->white_level, 1);
3174
3175    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
3176            gCamCapability[cameraId]->black_level_pattern, 4);
3177
3178    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
3179                      &gCamCapability[cameraId]->flash_charge_duration, 1);
3180
3181    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
3182                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
3183
3184    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
3185    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
3186                      (int32_t*)&maxFaces, 1);
3187
3188    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3189                      &gCamCapability[cameraId]->histogram_size, 1);
3190
3191    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3192            &gCamCapability[cameraId]->max_histogram_count, 1);
3193
3194    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
3195                                    gCamCapability[cameraId]->sharpness_map_size.height};
3196
3197    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
3198            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
3199
3200    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3201            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
3202
3203    int32_t scalar_formats[] = {
3204            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
3205            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
3206            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
3207            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
3208            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
3209    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
3210    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
3211                      scalar_formats,
3212                      scalar_formats_count);
3213
3214    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
3215    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
3216              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
3217              available_processed_sizes);
3218    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
3219                available_processed_sizes,
3220                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
3221
3222    int32_t available_raw_sizes[CAM_FORMAT_MAX * 2];
3223    makeTable(gCamCapability[cameraId]->raw_dim,
3224              gCamCapability[cameraId]->supported_raw_dim_cnt,
3225              available_raw_sizes);
3226    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
3227                available_raw_sizes,
3228                gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
3229
3230    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
3231    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
3232                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
3233                 available_fps_ranges);
3234    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3235            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
3236
3237    camera_metadata_rational exposureCompensationStep = {
3238            gCamCapability[cameraId]->exp_compensation_step.numerator,
3239            gCamCapability[cameraId]->exp_compensation_step.denominator};
3240    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
3241                      &exposureCompensationStep, 1);
3242
3243    /*TO DO*/
3244    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
3245    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3246                      availableVstabModes, sizeof(availableVstabModes));
3247
3248    /** Quirk for urgent 3A state until final interface is worked out */
3249    uint8_t usePartialResultQuirk = 1;
3250    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
3251                      &usePartialResultQuirk, 1);
3252
3253    /*HAL 1 and HAL 3 common*/
3254    float maxZoom = 4;
3255    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3256            &maxZoom, 1);
3257
3258    int32_t max3aRegions[] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
3259    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
3260            max3aRegions, 3);
3261
3262    uint8_t availableFaceDetectModes[] = {
3263            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
3264            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
3265    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3266                      availableFaceDetectModes,
3267                      sizeof(availableFaceDetectModes));
3268
3269    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
3270                                           gCamCapability[cameraId]->exposure_compensation_max};
3271    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
3272            exposureCompensationRange,
3273            sizeof(exposureCompensationRange)/sizeof(int32_t));
3274
3275    uint8_t lensFacing = (facingBack) ?
3276            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
3277    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
3278
3279    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
3280                available_processed_sizes,
3281                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
3282
3283    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3284                      available_thumbnail_sizes,
3285                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
3286
3287    /*android.scaler.availableStreamConfigurations*/
3288    int32_t max_stream_configs_size =
3289            gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3290            sizeof(scalar_formats)/sizeof(int32_t) * 4;
3291    int32_t available_stream_configs[max_stream_configs_size];
3292    int idx = 0;
3293    for (int j = 0; j < scalar_formats_count; j++) {
3294        switch (scalar_formats[j]) {
3295        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3296        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3297            for (int i = 0;
3298                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3299                available_stream_configs[idx] = scalar_formats[j];
3300                available_stream_configs[idx+1] =
3301                    gCamCapability[cameraId]->raw_dim[i].width;
3302                available_stream_configs[idx+2] =
3303                    gCamCapability[cameraId]->raw_dim[i].height;
3304                available_stream_configs[idx+3] =
3305                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3306                idx+=4;
3307            }
3308            break;
3309        default:
3310            for (int i = 0;
3311                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3312                available_stream_configs[idx] = scalar_formats[j];
3313                available_stream_configs[idx+1] =
3314                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3315                available_stream_configs[idx+2] =
3316                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3317                available_stream_configs[idx+3] =
3318                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3319                idx+=4;
3320            }
3321
3322
3323            break;
3324        }
3325    }
3326    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3327                      available_stream_configs, idx);
3328
3329    /* android.scaler.availableMinFrameDurations */
3330    int64_t available_min_durations[max_stream_configs_size];
3331    idx = 0;
3332    for (int j = 0; j < scalar_formats_count; j++) {
3333        switch (scalar_formats[j]) {
3334        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3335        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3336            for (int i = 0;
3337                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3338                available_min_durations[idx] = scalar_formats[j];
3339                available_min_durations[idx+1] =
3340                    gCamCapability[cameraId]->raw_dim[i].width;
3341                available_min_durations[idx+2] =
3342                    gCamCapability[cameraId]->raw_dim[i].height;
3343                available_min_durations[idx+3] =
3344                    gCamCapability[cameraId]->raw_min_duration[i];
3345                idx+=4;
3346            }
3347            break;
3348        default:
3349            for (int i = 0;
3350                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3351                available_min_durations[idx] = scalar_formats[j];
3352                available_min_durations[idx+1] =
3353                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3354                available_min_durations[idx+2] =
3355                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3356                available_min_durations[idx+3] =
3357                    gCamCapability[cameraId]->picture_min_duration[i];
3358                idx+=4;
3359            }
3360            break;
3361        }
3362    }
3363    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
3364                      &available_min_durations[0], idx);
3365
3366    int32_t max_jpeg_size = 0;
3367    int temp_width, temp_height;
3368    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3369        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3370        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3371        if (temp_width * temp_height > max_jpeg_size ) {
3372            max_jpeg_size = temp_width * temp_height;
3373        }
3374    }
3375    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3376    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
3377                      &max_jpeg_size, 1);
3378
3379    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
3380    size_t size = 0;
3381    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
3382        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
3383                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3384                                   gCamCapability[cameraId]->supported_effects[i]);
3385        if (val != NAME_NOT_FOUND) {
3386            avail_effects[size] = (uint8_t)val;
3387            size++;
3388        }
3389    }
3390    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
3391                      avail_effects,
3392                      size);
3393
3394    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
3395    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
3396    int32_t supported_scene_modes_cnt = 0;
3397    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
3398        int32_t val = lookupFwkName(SCENE_MODES_MAP,
3399                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3400                                gCamCapability[cameraId]->supported_scene_modes[i]);
3401        if (val != NAME_NOT_FOUND) {
3402            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
3403            supported_indexes[supported_scene_modes_cnt] = i;
3404            supported_scene_modes_cnt++;
3405        }
3406    }
3407
3408    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3409                      avail_scene_modes,
3410                      supported_scene_modes_cnt);
3411
3412    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
3413    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
3414                      supported_scene_modes_cnt,
3415                      scene_mode_overrides,
3416                      supported_indexes,
3417                      cameraId);
3418    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
3419                      scene_mode_overrides,
3420                      supported_scene_modes_cnt*3);
3421
3422    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
3423    size = 0;
3424    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
3425        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
3426                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3427                                 gCamCapability[cameraId]->supported_antibandings[i]);
3428        if (val != NAME_NOT_FOUND) {
3429            avail_antibanding_modes[size] = (uint8_t)val;
3430            size++;
3431        }
3432
3433    }
3434    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3435                      avail_antibanding_modes,
3436                      size);
3437
3438    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
3439    size = 0;
3440    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
3441        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
3442                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3443                                gCamCapability[cameraId]->supported_focus_modes[i]);
3444        if (val != NAME_NOT_FOUND) {
3445            avail_af_modes[size] = (uint8_t)val;
3446            size++;
3447        }
3448    }
3449    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
3450                      avail_af_modes,
3451                      size);
3452
3453    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
3454    size = 0;
3455    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
3456        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
3457                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3458                                    gCamCapability[cameraId]->supported_white_balances[i]);
3459        if (val != NAME_NOT_FOUND) {
3460            avail_awb_modes[size] = (uint8_t)val;
3461            size++;
3462        }
3463    }
3464    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
3465                      avail_awb_modes,
3466                      size);
3467
3468    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
3469    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
3470      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
3471
3472    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
3473            available_flash_levels,
3474            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
3475
3476    uint8_t flashAvailable;
3477    if (gCamCapability[cameraId]->flash_available)
3478        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
3479    else
3480        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
3481    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
3482            &flashAvailable, 1);
3483
3484    uint8_t avail_ae_modes[5];
3485    size = 0;
3486    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
3487        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
3488        size++;
3489    }
3490    if (flashAvailable) {
3491        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
3492        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
3493        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3494    }
3495    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
3496                      avail_ae_modes,
3497                      size);
3498
3499    int32_t sensitivity_range[2];
3500    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
3501    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
3502    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
3503                      sensitivity_range,
3504                      sizeof(sensitivity_range) / sizeof(int32_t));
3505
3506    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3507                      &gCamCapability[cameraId]->max_analog_sensitivity,
3508                      1);
3509
3510    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
3511    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
3512                      &sensor_orientation,
3513                      1);
3514
3515    int32_t max_output_streams[3] = {1, 3, 1};
3516    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
3517                      max_output_streams,
3518                      3);
3519
3520    uint8_t avail_leds = 0;
3521    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
3522                      &avail_leds, 0);
3523
3524    uint8_t focus_dist_calibrated;
3525    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
3526            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
3527            gCamCapability[cameraId]->focus_dist_calibrated);
3528    if (val != NAME_NOT_FOUND) {
3529        focus_dist_calibrated = (uint8_t)val;
3530        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3531                     &focus_dist_calibrated, 1);
3532    }
3533
3534    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
3535    size = 0;
3536    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
3537            i++) {
3538        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
3539                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3540                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
3541        if (val != NAME_NOT_FOUND) {
3542            avail_testpattern_modes[size] = val;
3543            size++;
3544        }
3545    }
3546    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3547                      avail_testpattern_modes,
3548                      size);
3549
3550    uint8_t max_pipeline_depth = kMaxInFlight + EMPTY_PIPELINE_DELAY;
3551    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
3552                      &max_pipeline_depth,
3553                      1);
3554
3555    int32_t partial_result_count = 2;
3556    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3557                      &partial_result_count,
3558                       1);
3559
3560    uint8_t available_capabilities[] =
3561        {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
3562         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
3563         ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM};
3564    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3565                      available_capabilities,
3566                      3);
3567
3568    int32_t max_input_streams = 0;
3569    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3570                      &max_input_streams,
3571                      1);
3572
3573    int32_t io_format_map[] = {};
3574    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3575                      io_format_map, 0);
3576
3577    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
3578    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
3579                      &max_latency,
3580                      1);
3581
3582    float optical_axis_angle[2];
3583    optical_axis_angle[0] = 0; //need to verify
3584    optical_axis_angle[1] = 0; //need to verify
3585    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
3586                      optical_axis_angle,
3587                      2);
3588
3589    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
3590    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3591                      available_hot_pixel_modes,
3592                      1);
3593
3594    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
3595                                      ANDROID_EDGE_MODE_FAST};
3596    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3597                      available_edge_modes,
3598                      2);
3599
3600    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
3601                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
3602    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3603                      available_noise_red_modes,
3604                      2);
3605
3606    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
3607                                         ANDROID_TONEMAP_MODE_FAST};
3608    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3609                      available_tonemap_modes,
3610                      2);
3611
3612    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
3613    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3614                      available_hot_pixel_map_modes,
3615                      1);
3616
3617    uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3618        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3619        gCamCapability[cameraId]->reference_illuminant1);
3620    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
3621                      &fwkReferenceIlluminant, 1);
3622
3623    fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3624        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3625        gCamCapability[cameraId]->reference_illuminant2);
3626    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
3627                      &fwkReferenceIlluminant, 1);
3628
3629    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
3630                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
3631                      3*3);
3632
3633    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
3634                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
3635                      3*3);
3636
3637    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
3638                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
3639                      3*3);
3640
3641    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
3642                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
3643                      3*3);
3644
3645    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
3646                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
3647                      3*3);
3648
3649    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
3650                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
3651                      3*3);
3652
3653
3654    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
3655       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
3656       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3657       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
3658       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3659       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
3660       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
3661       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3662       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
3663       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
3664       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
3665       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3666       ANDROID_JPEG_GPS_COORDINATES,
3667       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
3668       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
3669       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
3670       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3671       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
3672       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
3673       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
3674       ANDROID_SENSOR_FRAME_DURATION,
3675       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
3676       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
3677       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3678       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
3679       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3680       ANDROID_BLACK_LEVEL_LOCK };
3681    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
3682                      available_request_keys,
3683                      sizeof(available_request_keys)/sizeof(int32_t));
3684
3685    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
3686       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
3687       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
3688       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
3689       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
3690       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3691       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
3692       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
3693       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
3694       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3695       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
3696       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
3697       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
3698       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
3699       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3700       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
3701       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3702       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
3703       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3704       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3705       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
3706       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
3707       ANDROID_STATISTICS_FACE_SCORES};
3708    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3709                      available_result_keys,
3710                      sizeof(available_result_keys)/sizeof(int32_t));
3711
3712    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3713       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3714       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
3715       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
3716       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3717       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3718       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
3719       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
3720       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3721       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3722       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3723       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3724       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3725       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3726       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3727       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
3728       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3729       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3730       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
3731       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3732       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3733       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3734       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3735       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
3736       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
3737       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
3738       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
3739       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
3740       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3741       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3742       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3743       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3744       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
3745       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3746       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3747       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3748       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3749       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3750       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3751       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3752       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3753       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3754       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3755       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3756       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
3757    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
3758                      available_characteristics_keys,
3759                      sizeof(available_characteristics_keys)/sizeof(int32_t));
3760
3761    /*available stall durations depend on the hw + sw and will be different for different devices */
3762    /*have to add for raw after implementation*/
3763    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
3764    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
3765
3766    size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
3767    int64_t available_stall_durations[available_stall_size];
3768    idx = 0;
3769    for (uint32_t j = 0; j < stall_formats_count; j++) {
3770       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
3771          for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3772             available_stall_durations[idx]   = stall_formats[j];
3773             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3774             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3775             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
3776             idx+=4;
3777          }
3778       } else {
3779          for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3780             available_stall_durations[idx]   = stall_formats[j];
3781             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
3782             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
3783             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
3784             idx+=4;
3785          }
3786       }
3787    }
3788    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
3789                      available_stall_durations,
3790                      idx);
3791
3792    gStaticMetadata[cameraId] = staticInfo.release();
3793    return rc;
3794}
3795
3796/*===========================================================================
3797 * FUNCTION   : makeTable
3798 *
3799 * DESCRIPTION: make a table of sizes
3800 *
3801 * PARAMETERS :
3802 *
3803 *
3804 *==========================================================================*/
3805void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
3806                                          int32_t* sizeTable)
3807{
3808    int j = 0;
3809    for (int i = 0; i < size; i++) {
3810        sizeTable[j] = dimTable[i].width;
3811        sizeTable[j+1] = dimTable[i].height;
3812        j+=2;
3813    }
3814}
3815
3816/*===========================================================================
3817 * FUNCTION   : makeFPSTable
3818 *
3819 * DESCRIPTION: make a table of fps ranges
3820 *
3821 * PARAMETERS :
3822 *
3823 *==========================================================================*/
3824void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
3825                                          int32_t* fpsRangesTable)
3826{
3827    int j = 0;
3828    for (int i = 0; i < size; i++) {
3829        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
3830        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
3831        j+=2;
3832    }
3833}
3834
3835/*===========================================================================
3836 * FUNCTION   : makeOverridesList
3837 *
3838 * DESCRIPTION: make a list of scene mode overrides
3839 *
3840 * PARAMETERS :
3841 *
3842 *
3843 *==========================================================================*/
3844void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
3845                                                  uint8_t size, uint8_t* overridesList,
3846                                                  uint8_t* supported_indexes,
3847                                                  int camera_id)
3848{
3849    /*daemon will give a list of overrides for all scene modes.
3850      However we should send the fwk only the overrides for the scene modes
3851      supported by the framework*/
3852    int j = 0, index = 0, supt = 0;
3853    uint8_t focus_override;
3854    for (int i = 0; i < size; i++) {
3855        supt = 0;
3856        index = supported_indexes[i];
3857        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
3858        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3859                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3860                                                    overridesTable[index].awb_mode);
3861        focus_override = (uint8_t)overridesTable[index].af_mode;
3862        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
3863           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
3864              supt = 1;
3865              break;
3866           }
3867        }
3868        if (supt) {
3869           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3870                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3871                                              focus_override);
3872        } else {
3873           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
3874        }
3875        j+=3;
3876    }
3877}
3878
3879/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the backend format to a HAL pixel format recognized
 *              by the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
3887 *
3888 *==========================================================================*/
3889int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
3890{
3891    int32_t halPixelFormat;
3892
3893    switch (format) {
3894    case CAM_FORMAT_YUV_420_NV12:
3895        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
3896        break;
3897    case CAM_FORMAT_YUV_420_NV21:
3898        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3899        break;
3900    case CAM_FORMAT_YUV_420_NV21_ADRENO:
3901        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
3902        break;
3903    case CAM_FORMAT_YUV_420_YV12:
3904        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
3905        break;
3906    case CAM_FORMAT_YUV_422_NV16:
3907    case CAM_FORMAT_YUV_422_NV61:
3908    default:
3909        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
3910        break;
3911    }
3912    return halPixelFormat;
3913}
3914
3915/*===========================================================================
3916 * FUNCTION   : getSensorSensitivity
3917 *
3918 * DESCRIPTION: convert iso_mode to an integer value
3919 *
3920 * PARAMETERS : iso_mode : the iso_mode supported by sensor
3921 *
 * RETURN     : sensitivity supported by sensor
3923 *
3924 *==========================================================================*/
3925int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
3926{
3927    int32_t sensitivity;
3928
3929    switch (iso_mode) {
3930    case CAM_ISO_MODE_100:
3931        sensitivity = 100;
3932        break;
3933    case CAM_ISO_MODE_200:
3934        sensitivity = 200;
3935        break;
3936    case CAM_ISO_MODE_400:
3937        sensitivity = 400;
3938        break;
3939    case CAM_ISO_MODE_800:
3940        sensitivity = 800;
3941        break;
3942    case CAM_ISO_MODE_1600:
3943        sensitivity = 1600;
3944        break;
3945    default:
3946        sensitivity = -1;
3947        break;
3948    }
3949    return sensitivity;
3950}
3951
3952/*===========================================================================
3953 * FUNCTION   : AddSetMetaEntryToBatch
3954 *
3955 * DESCRIPTION: add set parameter entry into batch
3956 *
3957 * PARAMETERS :
3958 *   @p_table     : ptr to parameter buffer
3959 *   @paramType   : parameter type
3960 *   @paramLength : length of parameter value
3961 *   @paramValue  : ptr to parameter value
3962 *
3963 * RETURN     : int32_t type of status
3964 *              NO_ERROR  -- success
3965 *              none-zero failure code
3966 *==========================================================================*/
3967int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
3968                                                          unsigned int paramType,
3969                                                          uint32_t paramLength,
3970                                                          void *paramValue)
3971{
3972    int position = paramType;
3973    int current, next;
3974
3975    /*************************************************************************
3976    *                 Code to take care of linking next flags                *
3977    *************************************************************************/
3978    current = GET_FIRST_PARAM_ID(p_table);
3979    if (position == current){
3980        //DO NOTHING
3981    } else if (position < current){
3982        SET_NEXT_PARAM_ID(position, p_table, current);
3983        SET_FIRST_PARAM_ID(p_table, position);
3984    } else {
3985        /* Search for the position in the linked list where we need to slot in*/
3986        while (position > GET_NEXT_PARAM_ID(current, p_table))
3987            current = GET_NEXT_PARAM_ID(current, p_table);
3988
3989        /*If node already exists no need to alter linking*/
3990        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
3991            next = GET_NEXT_PARAM_ID(current, p_table);
3992            SET_NEXT_PARAM_ID(current, p_table, position);
3993            SET_NEXT_PARAM_ID(position, p_table, next);
3994        }
3995    }
3996
3997    /*************************************************************************
3998    *                   Copy contents into entry                             *
3999    *************************************************************************/
4000
4001    if (paramLength > sizeof(parm_type_t)) {
4002        ALOGE("%s:Size of input larger than max entry size",__func__);
4003        return BAD_VALUE;
4004    }
4005    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
4006    SET_PARM_VALID_BIT(paramType,p_table,1);
4007    return NO_ERROR;
4008}
4009
4010/*===========================================================================
4011 * FUNCTION   : lookupFwkName
4012 *
4013 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
4015 *
4016 * PARAMETERS  :
4017 *   @arr      : map between the two enums
4018 *   @len      : len of the map
4019 *   @hal_name : name of the hal_parm to map
4020 *
4021 * RETURN     : int type of status
4022 *              fwk_name  -- success
4023 *              none-zero failure code
4024 *==========================================================================*/
4025int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
4026                                             int len, int hal_name)
4027{
4028
4029    for (int i = 0; i < len; i++) {
4030        if (arr[i].hal_name == hal_name)
4031            return arr[i].fwk_name;
4032    }
4033
4034    /* Not able to find matching framework type is not necessarily
4035     * an error case. This happens when mm-camera supports more attributes
4036     * than the frameworks do */
4037    ALOGD("%s: Cannot find matching framework type", __func__);
4038    return NAME_NOT_FOUND;
4039}
4040
4041/*===========================================================================
4042 * FUNCTION   : lookupHalName
4043 *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @fwk_name : name of the framework parameter to map
4051 *
4052 * RETURN     : int32_t type of status
4053 *              hal_name  -- success
4054 *              none-zero failure code
4055 *==========================================================================*/
4056int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
4057                                             int len, unsigned int fwk_name)
4058{
4059    for (int i = 0; i < len; i++) {
4060       if (arr[i].fwk_name == fwk_name)
4061           return arr[i].hal_name;
4062    }
4063    ALOGE("%s: Cannot find matching hal type", __func__);
4064    return NAME_NOT_FOUND;
4065}
4066
4067/*===========================================================================
 * FUNCTION   : getCamInfo
4069 *
4070 * DESCRIPTION: query camera capabilities
4071 *
4072 * PARAMETERS :
4073 *   @cameraId  : camera Id
4074 *   @info      : camera info struct to be filled in with camera capabilities
4075 *
4076 * RETURN     : int32_t type of status
4077 *              NO_ERROR  -- success
4078 *              none-zero failure code
4079 *==========================================================================*/
4080int QCamera3HardwareInterface::getCamInfo(int cameraId,
4081                                    struct camera_info *info)
4082{
4083    int rc = 0;
4084
4085    if (NULL == gCamCapability[cameraId]) {
4086        rc = initCapabilities(cameraId);
4087        if (rc < 0) {
4088            //pthread_mutex_unlock(&g_camlock);
4089            return rc;
4090        }
4091    }
4092
4093    if (NULL == gStaticMetadata[cameraId]) {
4094        rc = initStaticMetadata(cameraId);
4095        if (rc < 0) {
4096            return rc;
4097        }
4098    }
4099
4100    switch(gCamCapability[cameraId]->position) {
4101    case CAM_POSITION_BACK:
4102        info->facing = CAMERA_FACING_BACK;
4103        break;
4104
4105    case CAM_POSITION_FRONT:
4106        info->facing = CAMERA_FACING_FRONT;
4107        break;
4108
4109    default:
4110        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
4111        rc = -1;
4112        break;
4113    }
4114
4115
4116    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
4117    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
4118    info->static_camera_characteristics = gStaticMetadata[cameraId];
4119
4120    return rc;
4121}
4122
4123/*===========================================================================
4124 * FUNCTION   : translateCapabilityToMetadata
4125 *
4126 * DESCRIPTION: translate the capability into camera_metadata_t
4127 *
4128 * PARAMETERS : type of the request
4129 *
4130 *
4131 * RETURN     : success: camera_metadata_t*
4132 *              failure: NULL
4133 *
4134 *==========================================================================*/
4135camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
4136{
4137    pthread_mutex_lock(&mMutex);
4138
4139    if (mDefaultMetadata[type] != NULL) {
4140        pthread_mutex_unlock(&mMutex);
4141        return mDefaultMetadata[type];
4142    }
4143    //first time we are handling this request
4144    //fill up the metadata structure using the wrapper class
4145    CameraMetadata settings;
4146    //translate from cam_capability_t to camera_metadata_tag_t
4147    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
4148    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
4149    int32_t defaultRequestID = 0;
4150    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
4151
4152    uint8_t controlIntent = 0;
4153    uint8_t focusMode;
4154    switch (type) {
4155      case CAMERA3_TEMPLATE_PREVIEW:
4156        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
4157        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4158        break;
4159      case CAMERA3_TEMPLATE_STILL_CAPTURE:
4160        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
4161        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4162        break;
4163      case CAMERA3_TEMPLATE_VIDEO_RECORD:
4164        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
4165        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4166        break;
4167      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
4168        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
4169        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4170        break;
4171      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
4172        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
4173        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4174        break;
4175      case CAMERA3_TEMPLATE_MANUAL:
4176        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
4177        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4178        break;
4179      default:
4180        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
4181        break;
4182    }
4183    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
4184
4185    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
4186        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4187    }
4188    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
4189
4190    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
4191            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
4192
4193    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
4194    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4195
4196    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
4197    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
4198
4199    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
4200    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
4201
4202    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
4203    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
4204
4205    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
4206    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
4207
4208    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
4209    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
4210
4211    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
4212    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
4213
4214    /*flash*/
4215    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
4216    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
4217
4218    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
4219    settings.update(ANDROID_FLASH_FIRING_POWER,
4220            &flashFiringLevel, 1);
4221
4222    /* lens */
4223    float default_aperture = gCamCapability[mCameraId]->apertures[0];
4224    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
4225
4226    if (gCamCapability[mCameraId]->filter_densities_count) {
4227        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
4228        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
4229                        gCamCapability[mCameraId]->filter_densities_count);
4230    }
4231
4232    float default_focal_length = gCamCapability[mCameraId]->focal_length;
4233    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
4234
4235    float default_focus_distance = 0;
4236    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
4237
4238    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
4239    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
4240
4241    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
4242    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
4243
4244    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
4245    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
4246
4247    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
4248    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
4249
4250    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
4251    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
4252
4253    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
4254    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
4255
4256    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4257    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4258
4259    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4260    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
4261
4262    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4263    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
4264
4265    /* Exposure time(Update the Min Exposure Time)*/
4266    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
4267    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
4268
4269    /* frame duration */
4270    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
4271    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
4272
4273    /* sensitivity */
4274    static const int32_t default_sensitivity = 100;
4275    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
4276
4277    /*edge mode*/
4278    static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
4279    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
4280
4281    /*noise reduction mode*/
4282    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
4283    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
4284
4285    /*color correction mode*/
4286    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
4287    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
4288
4289    /*transform matrix mode*/
4290    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
4291    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
4292
4293    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
4294    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
4295
4296    int32_t scaler_crop_region[4];
4297    scaler_crop_region[0] = 0;
4298    scaler_crop_region[1] = 0;
4299    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
4300    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
4301    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
4302
4303    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
4304    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
4305
4306    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4307    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
4308
4309    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
4310                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
4311                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
4312    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
4313
4314    /*focus distance*/
4315    float focus_distance = 0.0;
4316    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
4317
4318    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
4319    float max_range = 0.0;
4320    float max_fixed_fps = 0.0;
4321    int32_t fps_range[2] = {0, 0};
4322    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
4323            i++) {
4324        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
4325            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4326        if (type == CAMERA3_TEMPLATE_PREVIEW ||
4327                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
4328                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
4329            if (range > max_range) {
4330                fps_range[0] =
4331                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4332                fps_range[1] =
4333                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4334                max_range = range;
4335            }
4336        } else {
4337            if (range < 0.01 && max_fixed_fps <
4338                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
4339                fps_range[0] =
4340                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4341                fps_range[1] =
4342                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4343                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4344            }
4345        }
4346    }
4347    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
4348
4349    /*precapture trigger*/
4350    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
4351    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
4352
4353    /*af trigger*/
4354    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
4355    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
4356
4357    /* ae & af regions */
4358    int32_t active_region[] = {
4359            gCamCapability[mCameraId]->active_array_size.left,
4360            gCamCapability[mCameraId]->active_array_size.top,
4361            gCamCapability[mCameraId]->active_array_size.left +
4362                    gCamCapability[mCameraId]->active_array_size.width,
4363            gCamCapability[mCameraId]->active_array_size.top +
4364                    gCamCapability[mCameraId]->active_array_size.height,
4365            1};
4366    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
4367    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
4368
4369    /* black level lock */
4370    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4371    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
4372
4373    /* face detect mode */
4374    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
4375    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
4376
4377    /* lens shading map mode */
4378    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4379    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
4380
4381    //special defaults for manual template
4382    if (type == CAMERA3_TEMPLATE_MANUAL) {
4383        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
4384        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
4385
4386        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
4387        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
4388
4389        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
4390        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
4391
4392        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
4393        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
4394
4395        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
4396        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
4397
4398        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
4399        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
4400    }
4401    mDefaultMetadata[type] = settings.release();
4402
4403    pthread_mutex_unlock(&mMutex);
4404    return mDefaultMetadata[type];
4405}
4406
4407/*===========================================================================
4408 * FUNCTION   : setFrameParameters
4409 *
4410 * DESCRIPTION: set parameters per frame as requested in the metadata from
4411 *              framework
4412 *
4413 * PARAMETERS :
4414 *   @request   : request that needs to be serviced
4415 *   @streamID : Stream ID of all the requested streams
4416 *
4417 * RETURN     : success: NO_ERROR
 *              failure: non zero failure code
4419 *==========================================================================*/
4420int QCamera3HardwareInterface::setFrameParameters(
4421                    camera3_capture_request_t *request,
4422                    cam_stream_ID_t streamID)
4423{
4424    /*translate from camera_metadata_t type to parm_type_t*/
4425    int rc = 0;
4426    int32_t hal_version = CAM_HAL_V3;
4427
4428    memset(mParameters, 0, sizeof(metadata_buffer_t));
4429    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
4430    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
4431                sizeof(hal_version), &hal_version);
4432    if (rc < 0) {
4433        ALOGE("%s: Failed to set hal version in the parameters", __func__);
4434        return BAD_VALUE;
4435    }
4436
4437    /*we need to update the frame number in the parameters*/
4438    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
4439                                sizeof(request->frame_number), &(request->frame_number));
4440    if (rc < 0) {
4441        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4442        return BAD_VALUE;
4443    }
4444
4445    /* Update stream id of all the requested buffers */
4446    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
4447                                sizeof(cam_stream_ID_t), &streamID);
4448
4449    if (rc < 0) {
4450        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
4451        return BAD_VALUE;
4452    }
4453
4454    if(request->settings != NULL){
4455        rc = translateToHalMetadata(request, mParameters);
4456    }
4457
4458    /*set the parameters to backend*/
4459    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
4460    return rc;
4461}
4462
4463/*===========================================================================
4464 * FUNCTION   : setReprocParameters
4465 *
4466 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
4467 *              queue it to picture channel for reprocessing.
4468 *
4469 * PARAMETERS :
4470 *   @request   : request that needs to be serviced
4471 *
4472 * RETURN     : success: NO_ERROR
4473 *              failure: non zero failure code
4474 *==========================================================================*/
4475int QCamera3HardwareInterface::setReprocParameters(
4476        camera3_capture_request_t *request)
4477{
4478    /*translate from camera_metadata_t type to parm_type_t*/
4479    int rc = 0;
4480    metadata_buffer_t *reprocParam = NULL;
4481
4482    if(request->settings != NULL){
4483        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
4484        return BAD_VALUE;
4485    }
4486    reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
4487    if (!reprocParam) {
4488        ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
4489        return NO_MEMORY;
4490    }
4491    memset(reprocParam, 0, sizeof(metadata_buffer_t));
4492    reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
4493
4494    /*we need to update the frame number in the parameters*/
4495    rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
4496                                sizeof(request->frame_number), &(request->frame_number));
4497    if (rc < 0) {
4498        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4499        return BAD_VALUE;
4500    }
4501
4502
4503    rc = translateToHalMetadata(request, reprocParam);
4504    if (rc < 0) {
4505        ALOGE("%s: Failed to translate reproc request", __func__);
4506        delete reprocParam;
4507        return rc;
4508    }
4509    /*queue metadata for reprocessing*/
4510    rc = mPictureChannel->queueReprocMetadata(reprocParam);
4511    if (rc < 0) {
4512        ALOGE("%s: Failed to queue reprocessing metadata", __func__);
4513        delete reprocParam;
4514    }
4515
4516    return rc;
4517}
4518
4519/*===========================================================================
4520 * FUNCTION   : translateToHalMetadata
4521 *
4522 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
4523 *
4524 *
4525 * PARAMETERS :
4526 *   @request  : request sent from framework
4527 *
4528 *
4529 * RETURN     : success: NO_ERROR
 *              failure: non zero failure code
4531 *==========================================================================*/
4532int QCamera3HardwareInterface::translateToHalMetadata
4533                                  (const camera3_capture_request_t *request,
4534                                   metadata_buffer_t *hal_metadata)
4535{
4536    int rc = 0;
4537    CameraMetadata frame_settings;
4538    frame_settings = request->settings;
4539
4540    /* Do not change the order of the following list unless you know what you are
4541     * doing.
4542     * The order is laid out in such a way that parameters in the front of the table
4543     * may be used to override the parameters later in the table. Examples are:
4544     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
4548     */
4549    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4550        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4551        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_MODE,
4552                sizeof(metaMode), &metaMode);
4553        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4554           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4555           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4556                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4557                                             fwk_sceneMode);
4558           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4559                sizeof(sceneMode), &sceneMode);
4560        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4561           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4562           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4563                sizeof(sceneMode), &sceneMode);
4564        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4565           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4566           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4567                sizeof(sceneMode), &sceneMode);
4568        }
4569    }
4570
4571    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4572        uint8_t fwk_aeMode =
4573            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4574        uint8_t aeMode;
4575        int32_t redeye;
4576
4577        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4578            aeMode = CAM_AE_MODE_OFF;
4579        } else {
4580            aeMode = CAM_AE_MODE_ON;
4581        }
4582        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4583            redeye = 1;
4584        } else {
4585            redeye = 0;
4586        }
4587
4588        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
4589                                          sizeof(AE_FLASH_MODE_MAP),
4590                                          fwk_aeMode);
4591        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
4592                sizeof(aeMode), &aeMode);
4593        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4594                sizeof(flashMode), &flashMode);
4595        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
4596                sizeof(redeye), &redeye);
4597    }
4598
4599    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
4600        uint8_t fwk_whiteLevel =
4601            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
4602        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
4603                sizeof(WHITE_BALANCE_MODES_MAP),
4604                fwk_whiteLevel);
4605        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
4606                sizeof(whiteLevel), &whiteLevel);
4607    }
4608
4609    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
4610        uint8_t fwk_focusMode =
4611            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
4612        uint8_t focusMode;
4613        focusMode = lookupHalName(FOCUS_MODES_MAP,
4614                                   sizeof(FOCUS_MODES_MAP),
4615                                   fwk_focusMode);
4616        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
4617                sizeof(focusMode), &focusMode);
4618    }
4619
4620    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
4621        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
4622        rc = AddSetMetaEntryToBatch(hal_metadata,
4623                CAM_INTF_META_LENS_FOCUS_DISTANCE,
4624                sizeof(focalDistance), &focalDistance);
4625    }
4626
4627    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
4628        uint8_t fwk_antibandingMode =
4629            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
4630        uint8_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
4631                     sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
4632                     fwk_antibandingMode);
4633        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
4634                sizeof(hal_antibandingMode), &hal_antibandingMode);
4635    }
4636
4637    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4638        int32_t expCompensation = frame_settings.find(
4639            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4640        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4641            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4642        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4643            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4644        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4645          sizeof(expCompensation), &expCompensation);
4646    }
4647
4648    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
4649        int32_t expCompensation = frame_settings.find(
4650            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
4651        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
4652            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
4653        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
4654            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
4655        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
4656          sizeof(expCompensation), &expCompensation);
4657    }
4658
4659    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
4660        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
4661        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
4662                sizeof(aeLock), &aeLock);
4663    }
4664    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4665        cam_fps_range_t fps_range;
4666        fps_range.min_fps =
4667            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
4668        fps_range.max_fps =
4669            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4670        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
4671                sizeof(fps_range), &fps_range);
4672    }
4673
4674    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
4675        uint8_t awbLock =
4676            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
4677        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
4678                sizeof(awbLock), &awbLock);
4679    }
4680
4681    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
4682        uint8_t fwk_effectMode =
4683            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
4684        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
4685                sizeof(EFFECT_MODES_MAP),
4686                fwk_effectMode);
4687        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
4688                sizeof(effectMode), &effectMode);
4689    }
4690
4691    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
4692        uint8_t colorCorrectMode =
4693            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
4694        rc =
4695            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
4696                    sizeof(colorCorrectMode), &colorCorrectMode);
4697    }
4698
4699    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
4700        cam_color_correct_gains_t colorCorrectGains;
4701        for (int i = 0; i < 4; i++) {
4702            colorCorrectGains.gains[i] =
4703                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
4704        }
4705        rc =
4706            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
4707                    sizeof(colorCorrectGains), &colorCorrectGains);
4708    }
4709
4710    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
4711        cam_color_correct_matrix_t colorCorrectTransform;
4712        cam_rational_type_t transform_elem;
4713        int num = 0;
4714        for (int i = 0; i < 3; i++) {
4715           for (int j = 0; j < 3; j++) {
4716              transform_elem.numerator =
4717                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
4718              transform_elem.denominator =
4719                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
4720              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
4721              num++;
4722           }
4723        }
4724        rc =
4725            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
4726                    sizeof(colorCorrectTransform), &colorCorrectTransform);
4727    }
4728
4729    cam_trigger_t aecTrigger;
4730    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
4731    aecTrigger.trigger_id = -1;
4732    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
4733        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
4734        aecTrigger.trigger =
4735            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
4736        aecTrigger.trigger_id =
4737            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
4738        rc = AddSetMetaEntryToBatch(hal_metadata,
4739                CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
4740                sizeof(aecTrigger), &aecTrigger);
4741    }
4742    /*af_trigger must come with a trigger id*/
4743    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
4744        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
4745        cam_trigger_t af_trigger;
4746        af_trigger.trigger =
4747            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
4748        af_trigger.trigger_id =
4749            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
4750        rc = AddSetMetaEntryToBatch(hal_metadata,
4751                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
4752    }
4753
4754    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
4755        int32_t demosaic =
4756            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
4757        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
4758                sizeof(demosaic), &demosaic);
4759    }
4760
4761    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
4762        cam_edge_application_t edge_application;
4763        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
4764        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
4765            edge_application.sharpness = 0;
4766        } else {
4767            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
4768                uint8_t edgeStrength =
4769                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
4770                edge_application.sharpness = (int32_t)edgeStrength;
4771            } else {
4772                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
4773            }
4774        }
4775        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
4776                sizeof(edge_application), &edge_application);
4777    }
4778
4779    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
4780        int32_t respectFlashMode = 1;
4781        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4782            uint8_t fwk_aeMode =
4783                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4784            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
4785                respectFlashMode = 0;
4786                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
4787                    __func__);
4788            }
4789        }
4790        if (respectFlashMode) {
4791            uint8_t flashMode =
4792                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
4793            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
4794                                          sizeof(FLASH_MODES_MAP),
4795                                          flashMode);
4796            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
4797            // To check: CAM_INTF_META_FLASH_MODE usage
4798            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
4799                          sizeof(flashMode), &flashMode);
4800        }
4801    }
4802
4803    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
4804        uint8_t flashPower =
4805            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
4806        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
4807                sizeof(flashPower), &flashPower);
4808    }
4809
4810    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
4811        int64_t flashFiringTime =
4812            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
4813        rc = AddSetMetaEntryToBatch(hal_metadata,
4814                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
4815    }
4816
4817    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
4818        uint8_t hotPixelMode =
4819            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
4820        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
4821                sizeof(hotPixelMode), &hotPixelMode);
4822    }
4823
4824    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
4825        float lensAperture =
4826            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
4827        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
4828                sizeof(lensAperture), &lensAperture);
4829    }
4830
4831    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
4832        float filterDensity =
4833            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
4834        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
4835                sizeof(filterDensity), &filterDensity);
4836    }
4837
4838    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
4839        float focalLength =
4840            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
4841        rc = AddSetMetaEntryToBatch(hal_metadata,
4842                CAM_INTF_META_LENS_FOCAL_LENGTH,
4843                sizeof(focalLength), &focalLength);
4844    }
4845
4846    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
4847        uint8_t optStabMode =
4848            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
4849        rc = AddSetMetaEntryToBatch(hal_metadata,
4850                CAM_INTF_META_LENS_OPT_STAB_MODE,
4851                sizeof(optStabMode), &optStabMode);
4852    }
4853
4854    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4855        uint8_t noiseRedMode =
4856            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4857        rc = AddSetMetaEntryToBatch(hal_metadata,
4858                CAM_INTF_META_NOISE_REDUCTION_MODE,
4859                sizeof(noiseRedMode), &noiseRedMode);
4860    }
4861
4862    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
4863        uint8_t noiseRedStrength =
4864            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
4865        rc = AddSetMetaEntryToBatch(hal_metadata,
4866                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
4867                sizeof(noiseRedStrength), &noiseRedStrength);
4868    }
4869
4870    cam_crop_region_t scalerCropRegion;
4871    bool scalerCropSet = false;
4872    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
4873        scalerCropRegion.left =
4874            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
4875        scalerCropRegion.top =
4876            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
4877        scalerCropRegion.width =
4878            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
4879        scalerCropRegion.height =
4880            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
4881        rc = AddSetMetaEntryToBatch(hal_metadata,
4882                CAM_INTF_META_SCALER_CROP_REGION,
4883                sizeof(scalerCropRegion), &scalerCropRegion);
4884        scalerCropSet = true;
4885    }
4886
4887    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
4888        int64_t sensorExpTime =
4889            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
4890        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
4891        rc = AddSetMetaEntryToBatch(hal_metadata,
4892                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
4893                sizeof(sensorExpTime), &sensorExpTime);
4894    }
4895
4896    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
4897        int64_t sensorFrameDuration =
4898            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
4899        int64_t minFrameDuration = getMinFrameDuration(request);
4900        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
4901        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
4902            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
4903        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
4904        rc = AddSetMetaEntryToBatch(hal_metadata,
4905                CAM_INTF_META_SENSOR_FRAME_DURATION,
4906                sizeof(sensorFrameDuration), &sensorFrameDuration);
4907    }
4908
4909    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
4910        int32_t sensorSensitivity =
4911            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
4912        if (sensorSensitivity <
4913                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
4914            sensorSensitivity =
4915                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
4916        if (sensorSensitivity >
4917                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
4918            sensorSensitivity =
4919                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
4920        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
4921        rc = AddSetMetaEntryToBatch(hal_metadata,
4922                CAM_INTF_META_SENSOR_SENSITIVITY,
4923                sizeof(sensorSensitivity), &sensorSensitivity);
4924    }
4925
4926    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
4927        int32_t shadingMode =
4928            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
4929        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
4930                sizeof(shadingMode), &shadingMode);
4931    }
4932
4933    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
4934        uint8_t shadingStrength =
4935            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
4936        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
4937                sizeof(shadingStrength), &shadingStrength);
4938    }
4939
4940    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
4941        uint8_t fwk_facedetectMode =
4942            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
4943        uint8_t facedetectMode =
4944            lookupHalName(FACEDETECT_MODES_MAP,
4945                sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
4946        rc = AddSetMetaEntryToBatch(hal_metadata,
4947                CAM_INTF_META_STATS_FACEDETECT_MODE,
4948                sizeof(facedetectMode), &facedetectMode);
4949    }
4950
4951    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
4952        uint8_t histogramMode =
4953            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
4954        rc = AddSetMetaEntryToBatch(hal_metadata,
4955                CAM_INTF_META_STATS_HISTOGRAM_MODE,
4956                sizeof(histogramMode), &histogramMode);
4957    }
4958
4959    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
4960        uint8_t sharpnessMapMode =
4961            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
4962        rc = AddSetMetaEntryToBatch(hal_metadata,
4963                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
4964                sizeof(sharpnessMapMode), &sharpnessMapMode);
4965    }
4966
4967    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
4968        uint8_t tonemapMode =
4969            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
4970        rc = AddSetMetaEntryToBatch(hal_metadata,
4971                CAM_INTF_META_TONEMAP_MODE,
4972                sizeof(tonemapMode), &tonemapMode);
4973    }
4974    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
4975    /*All tonemap channels will have the same number of points*/
4976    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
4977        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
4978        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
4979        cam_rgb_tonemap_curves tonemapCurves;
4980        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
4981
4982        /* ch0 = G*/
4983        int point = 0;
4984        cam_tonemap_curve_t tonemapCurveGreen;
4985        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
4986            for (int j = 0; j < 2; j++) {
4987               tonemapCurveGreen.tonemap_points[i][j] =
4988                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
4989               point++;
4990            }
4991        }
4992        tonemapCurves.curves[0] = tonemapCurveGreen;
4993
4994        /* ch 1 = B */
4995        point = 0;
4996        cam_tonemap_curve_t tonemapCurveBlue;
4997        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
4998            for (int j = 0; j < 2; j++) {
4999               tonemapCurveBlue.tonemap_points[i][j] =
5000                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
5001               point++;
5002            }
5003        }
5004        tonemapCurves.curves[1] = tonemapCurveBlue;
5005
5006        /* ch 2 = R */
5007        point = 0;
5008        cam_tonemap_curve_t tonemapCurveRed;
5009        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
5010            for (int j = 0; j < 2; j++) {
5011               tonemapCurveRed.tonemap_points[i][j] =
5012                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
5013               point++;
5014            }
5015        }
5016        tonemapCurves.curves[2] = tonemapCurveRed;
5017
5018        rc = AddSetMetaEntryToBatch(hal_metadata,
5019                CAM_INTF_META_TONEMAP_CURVES,
5020                sizeof(tonemapCurves), &tonemapCurves);
5021    }
5022
5023    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5024        uint8_t captureIntent =
5025            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5026        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
5027                sizeof(captureIntent), &captureIntent);
5028    }
5029
5030    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
5031        uint8_t blackLevelLock =
5032            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
5033        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
5034                sizeof(blackLevelLock), &blackLevelLock);
5035    }
5036
5037    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5038        uint8_t lensShadingMapMode =
5039            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5040        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5041                sizeof(lensShadingMapMode), &lensShadingMapMode);
5042    }
5043
5044    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
5045        cam_area_t roi;
5046        bool reset = true;
5047        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
5048        if (scalerCropSet) {
5049            reset = resetIfNeededROI(&roi, &scalerCropRegion);
5050        }
5051        if (reset) {
5052            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
5053                    sizeof(roi), &roi);
5054        }
5055    }
5056
5057    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
5058        cam_area_t roi;
5059        bool reset = true;
5060        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
5061        if (scalerCropSet) {
5062            reset = resetIfNeededROI(&roi, &scalerCropRegion);
5063        }
5064        if (reset) {
5065            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
5066                    sizeof(roi), &roi);
5067        }
5068    }
5069
5070    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
5071        cam_area_t roi;
5072        bool reset = true;
5073        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
5074        if (scalerCropSet) {
5075            reset = resetIfNeededROI(&roi, &scalerCropRegion);
5076        }
5077        if (reset) {
5078            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AWB_REGIONS,
5079                    sizeof(roi), &roi);
5080        }
5081    }
5082
5083    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
5084        cam_test_pattern_data_t testPatternData;
5085        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
5086        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
5087               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
5088
5089        memset(&testPatternData, 0, sizeof(testPatternData));
5090        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
5091        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
5092                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
5093            int32_t* fwk_testPatternData = frame_settings.find(
5094                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
5095            testPatternData.r = fwk_testPatternData[0];
5096            testPatternData.b = fwk_testPatternData[3];
5097            switch (gCamCapability[mCameraId]->color_arrangement) {
5098            case CAM_FILTER_ARRANGEMENT_RGGB:
5099            case CAM_FILTER_ARRANGEMENT_GRBG:
5100                testPatternData.gr = fwk_testPatternData[1];
5101                testPatternData.gb = fwk_testPatternData[2];
5102                break;
5103            case CAM_FILTER_ARRANGEMENT_GBRG:
5104            case CAM_FILTER_ARRANGEMENT_BGGR:
5105                testPatternData.gr = fwk_testPatternData[2];
5106                testPatternData.gb = fwk_testPatternData[1];
5107                break;
5108            default:
5109                ALOGE("%s: color arrangement %d is not supported", __func__,
5110                    gCamCapability[mCameraId]->color_arrangement);
5111                break;
5112            }
5113        }
5114        rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
5115            sizeof(testPatternData), &testPatternData);
5116    }
5117
5118    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
5119        double *gps_coords =
5120            frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
5121        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
5122    }
5123
5124    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
5125        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
5126        const char *gps_methods_src = (const char *)
5127                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
5128        uint32_t count = frame_settings.find(
5129                ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
5130        memset(gps_methods, 0, sizeof(gps_methods));
5131        strncpy(gps_methods, gps_methods_src, sizeof(gps_methods));
5132        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
5133    }
5134
5135    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
5136        int64_t gps_timestamp =
5137            frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
5138        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
5139    }
5140
5141    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5142        int32_t orientation =
5143            frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5144        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
5145    }
5146
5147    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
5148        int8_t quality =
5149            frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
5150        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
5151    }
5152
5153    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
5154        int8_t thumb_quality =
5155            frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
5156        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
5157    }
5158
5159    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5160        cam_dimension_t dim;
5161        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5162        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5163        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
5164    }
5165
5166    // Internal metadata
5167    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
5168        uint8_t* privatedata =
5169            frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
5170        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
5171            sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
5172    }
5173
5174    // EV step
5175    rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
5176            sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
5177
5178    return rc;
5179}
5180
5181/*===========================================================================
5182 * FUNCTION   : captureResultCb
5183 *
5184 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
5185 *
5186 * PARAMETERS :
5187 *   @frame  : frame information from mm-camera-interface
5188 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
5189 *   @userdata: userdata
5190 *
5191 * RETURN     : NONE
5192 *==========================================================================*/
5193void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
5194                camera3_stream_buffer_t *buffer,
5195                uint32_t frame_number, void *userdata)
5196{
5197    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
5198    if (hw == NULL) {
5199        ALOGE("%s: Invalid hw %p", __func__, hw);
5200        return;
5201    }
5202
5203    hw->captureResultCb(metadata, buffer, frame_number);
5204    return;
5205}
5206
5207
5208/*===========================================================================
5209 * FUNCTION   : initialize
5210 *
5211 * DESCRIPTION: Pass framework callback pointers to HAL
5212 *
5213 * PARAMETERS :
5214 *
5215 *
5216 * RETURN     : Success : 0
5217 *              Failure: -ENODEV
5218 *==========================================================================*/
5219
5220int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
5221                                  const camera3_callback_ops_t *callback_ops)
5222{
5223    ALOGV("%s: E", __func__);
5224    QCamera3HardwareInterface *hw =
5225        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5226    if (!hw) {
5227        ALOGE("%s: NULL camera device", __func__);
5228        return -ENODEV;
5229    }
5230
5231    int rc = hw->initialize(callback_ops);
5232    ALOGV("%s: X", __func__);
5233    return rc;
5234}
5235
5236/*===========================================================================
5237 * FUNCTION   : configure_streams
5238 *
 * DESCRIPTION: Entry point that forwards the framework's stream configuration
 *              to the HAL instance (hw->configureStreams)
5240 *
5241 * PARAMETERS :
5242 *
5243 *
5244 * RETURN     : Success: 0
5245 *              Failure: -EINVAL (if stream configuration is invalid)
5246 *                       -ENODEV (fatal error)
5247 *==========================================================================*/
5248
5249int QCamera3HardwareInterface::configure_streams(
5250        const struct camera3_device *device,
5251        camera3_stream_configuration_t *stream_list)
5252{
5253    ALOGV("%s: E", __func__);
5254    QCamera3HardwareInterface *hw =
5255        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5256    if (!hw) {
5257        ALOGE("%s: NULL camera device", __func__);
5258        return -ENODEV;
5259    }
5260    int rc = hw->configureStreams(stream_list);
5261    ALOGV("%s: X", __func__);
5262    return rc;
5263}
5264
5265/*===========================================================================
5266 * FUNCTION   : register_stream_buffers
5267 *
5268 * DESCRIPTION: Register stream buffers with the device
5269 *
5270 * PARAMETERS :
5271 *
5272 * RETURN     :
5273 *==========================================================================*/
5274int QCamera3HardwareInterface::register_stream_buffers(
5275        const struct camera3_device *device,
5276        const camera3_stream_buffer_set_t *buffer_set)
5277{
5278    ALOGV("%s: E", __func__);
5279    QCamera3HardwareInterface *hw =
5280        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5281    if (!hw) {
5282        ALOGE("%s: NULL camera device", __func__);
5283        return -ENODEV;
5284    }
5285    int rc = hw->registerStreamBuffers(buffer_set);
5286    ALOGV("%s: X", __func__);
5287    return rc;
5288}
5289
5290/*===========================================================================
5291 * FUNCTION   : construct_default_request_settings
5292 *
5293 * DESCRIPTION: Configure a settings buffer to meet the required use case
5294 *
5295 * PARAMETERS :
5296 *
5297 *
5298 * RETURN     : Success: Return valid metadata
5299 *              Failure: Return NULL
5300 *==========================================================================*/
5301const camera_metadata_t* QCamera3HardwareInterface::
5302    construct_default_request_settings(const struct camera3_device *device,
5303                                        int type)
5304{
5305
5306    ALOGV("%s: E", __func__);
5307    camera_metadata_t* fwk_metadata = NULL;
5308    QCamera3HardwareInterface *hw =
5309        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5310    if (!hw) {
5311        ALOGE("%s: NULL camera device", __func__);
5312        return NULL;
5313    }
5314
5315    fwk_metadata = hw->translateCapabilityToMetadata(type);
5316
5317    ALOGV("%s: X", __func__);
5318    return fwk_metadata;
5319}
5320
5321/*===========================================================================
5322 * FUNCTION   : process_capture_request
5323 *
 * DESCRIPTION: Entry point that forwards a framework capture request to the
 *              HAL instance (hw->processCaptureRequest)
5325 *
5326 * PARAMETERS :
5327 *
5328 *
5329 * RETURN     :
5330 *==========================================================================*/
5331int QCamera3HardwareInterface::process_capture_request(
5332                    const struct camera3_device *device,
5333                    camera3_capture_request_t *request)
5334{
5335    ALOGV("%s: E", __func__);
5336    QCamera3HardwareInterface *hw =
5337        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5338    if (!hw) {
5339        ALOGE("%s: NULL camera device", __func__);
5340        return -EINVAL;
5341    }
5342
5343    int rc = hw->processCaptureRequest(request);
5344    ALOGV("%s: X", __func__);
5345    return rc;
5346}
5347
5348/*===========================================================================
5349 * FUNCTION   : dump
5350 *
5351 * DESCRIPTION:
5352 *
5353 * PARAMETERS :
5354 *
5355 *
5356 * RETURN     :
5357 *==========================================================================*/
5358
5359void QCamera3HardwareInterface::dump(
5360                const struct camera3_device *device, int fd)
5361{
5362    ALOGV("%s: E", __func__);
5363    QCamera3HardwareInterface *hw =
5364        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5365    if (!hw) {
5366        ALOGE("%s: NULL camera device", __func__);
5367        return;
5368    }
5369
5370    hw->dump(fd);
5371    ALOGV("%s: X", __func__);
5372    return;
5373}
5374
5375/*===========================================================================
5376 * FUNCTION   : flush
5377 *
5378 * DESCRIPTION:
5379 *
5380 * PARAMETERS :
5381 *
5382 *
5383 * RETURN     :
5384 *==========================================================================*/
5385
5386int QCamera3HardwareInterface::flush(
5387                const struct camera3_device *device)
5388{
5389    int rc;
5390    ALOGV("%s: E", __func__);
5391    QCamera3HardwareInterface *hw =
5392        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5393    if (!hw) {
5394        ALOGE("%s: NULL camera device", __func__);
5395        return -EINVAL;
5396    }
5397
5398    rc = hw->flush();
5399    ALOGV("%s: X", __func__);
5400    return rc;
5401}
5402
5403/*===========================================================================
5404 * FUNCTION   : close_camera_device
5405 *
5406 * DESCRIPTION:
5407 *
5408 * PARAMETERS :
5409 *
5410 *
5411 * RETURN     :
5412 *==========================================================================*/
5413int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
5414{
5415    ALOGV("%s: E", __func__);
5416    int ret = NO_ERROR;
5417    QCamera3HardwareInterface *hw =
5418        reinterpret_cast<QCamera3HardwareInterface *>(
5419            reinterpret_cast<camera3_device_t *>(device)->priv);
5420    if (!hw) {
5421        ALOGE("NULL camera device");
5422        return BAD_VALUE;
5423    }
5424    delete hw;
5425
5426    pthread_mutex_lock(&mCameraSessionLock);
5427    mCameraSessionActive = 0;
5428    pthread_mutex_unlock(&mCameraSessionLock);
5429    ALOGV("%s: X", __func__);
5430    return ret;
5431}
5432
5433/*===========================================================================
5434 * FUNCTION   : getWaveletDenoiseProcessPlate
5435 *
5436 * DESCRIPTION: query wavelet denoise process plate
5437 *
5438 * PARAMETERS : None
5439 *
 * RETURN     : WNR process plate value
5441 *==========================================================================*/
5442cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
5443{
5444    char prop[PROPERTY_VALUE_MAX];
5445    memset(prop, 0, sizeof(prop));
5446    property_get("persist.denoise.process.plates", prop, "0");
5447    int processPlate = atoi(prop);
5448    switch(processPlate) {
5449    case 0:
5450        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
5451    case 1:
5452        return CAM_WAVELET_DENOISE_CBCR_ONLY;
5453    case 2:
5454        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5455    case 3:
5456        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
5457    default:
5458        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5459    }
5460}
5461
5462/*===========================================================================
5463 * FUNCTION   : needRotationReprocess
5464 *
5465 * DESCRIPTION: if rotation needs to be done by reprocess in pp
5466 *
5467 * PARAMETERS : none
5468 *
5469 * RETURN     : true: needed
5470 *              false: no need
5471 *==========================================================================*/
5472bool QCamera3HardwareInterface::needRotationReprocess()
5473{
5474    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
5475        // current rotation is not zero, and pp has the capability to process rotation
5476        ALOGD("%s: need do reprocess for rotation", __func__);
5477        return true;
5478    }
5479
5480    return false;
5481}
5482
5483/*===========================================================================
5484 * FUNCTION   : needReprocess
5485 *
 * DESCRIPTION: if reprocess is needed
5487 *
5488 * PARAMETERS : none
5489 *
5490 * RETURN     : true: needed
5491 *              false: no need
5492 *==========================================================================*/
5493bool QCamera3HardwareInterface::needReprocess()
5494{
5495    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
5496        // TODO: add for ZSL HDR later
5497        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5498        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5499        return true;
5500    }
5501    return needRotationReprocess();
5502}
5503
5504/*===========================================================================
5505 * FUNCTION   : addOfflineReprocChannel
5506 *
5507 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
5508 *              coming from input channel
5509 *
5510 * PARAMETERS :
5511 *   @pInputChannel : ptr to input channel whose frames will be post-processed
5512 *
5513 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
5514 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;
    if (pInputChannel == NULL) {
        ALOGE("%s: input channel obj is NULL", __func__);
        return NULL;
    }

    // Create the reprocess channel bound to the same camera handle/ops as
    // the input channel; picChHandle receives the reprocessed output.
    // NOTE(review): the NULL check below only fires on a non-throwing new
    // (e.g. -fno-exceptions builds) — presumably intentional for this HAL.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    rc = pChannel->initialize();
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: derive the post-processing feature mask from the
    // per-frame metadata attached to this request.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Edge/sharpness: enable PP sharpness unless edge mode is OFF.
    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
        cam_edge_application_t *edge = (cam_edge_application_t *)
                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
            pp_config.sharpness = edge->sharpness;
        }
    }

    // Noise reduction: enable 2D denoise with the property-selected plate
    // unless NR mode is OFF.
    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
            pp_config.denoise2d.denoise_enable = 1;
            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
        }
    }

    // Rotation: if PP supports rotation, map the JPEG orientation (degrees)
    // onto the PP rotation enum. Values other than 0/90/180/270 leave
    // pp_config.rotation at its zeroed default.
    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
        int32_t *rotation = (int32_t *)POINTER_OF(
                CAM_INTF_META_JPEG_ORIENTATION, metadata);

        if (needRotationReprocess()) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
            if (*rotation == 0) {
                pp_config.rotation = ROTATE_0;
            } else if (*rotation == 90) {
                pp_config.rotation = ROTATE_90;
            } else if (*rotation == 180) {
                pp_config.rotation = ROTATE_180;
            } else if (*rotation == 270) {
                pp_config.rotation = ROTATE_270;
            }
        }
    }

    // Clone the input channel's streams onto the reprocess channel with the
    // assembled PP configuration; metadata flows via mMetadataChannel.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
                                             pInputChannel,
                                             mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
5590
5591}; //end namespace qcamera
5592